diff --git hbase-rpc/pom.xml hbase-rpc/pom.xml
new file mode 100644
index 0000000..a699be4
--- /dev/null
+++ hbase-rpc/pom.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>0.95-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <artifactId>hbase-rpc</artifactId>
+  <name>HBase - RPC</name>
+  <description>RPC Classes of HBase</description>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <properties>
+            <property>
+              <name>listener</name>
+              <value>org.apache.hadoop.hbase.ResourceCheckerJUnitListener</value>
+            </property>
+          </properties>
+        </configuration>
+        <executions>
+          <execution>
+            <id>secondPartTestsExecution</id>
+            <phase>test</phase>
+            <goals>
+              <goal>test</goal>
+            </goals>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>skip-rpc-tests</id>
+      <activation>
+        <property>
+          <name>skip-rpc-tests</name>
+        </property>
+      </activation>
+      <properties>
+        <surefire.skipFirstPart>true</surefire.skipFirstPart>
+      </properties>
+    </profile>
+  </profiles>
+</project>
\ No newline at end of file
diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
new file mode 100644
index 0000000..b246e53
--- /dev/null
+++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
@@ -0,0 +1,6553 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: AccessControl.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AccessControlProtos {
+  private AccessControlProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface PermissionOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated .Permission.Action action = 1;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> getActionList();
+    int getActionCount();
+    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index);
+
+    // optional bytes table = 2;
+    boolean hasTable();
+    com.google.protobuf.ByteString getTable();
+
+    // optional bytes family = 3;
+    boolean hasFamily();
+    com.google.protobuf.ByteString getFamily();
+
+    // optional bytes qualifier = 4;
+    boolean hasQualifier();
+    com.google.protobuf.ByteString getQualifier();
+  }
+  public static final class Permission extends
+      com.google.protobuf.GeneratedMessage
+      implements PermissionOrBuilder {
+    // Use Permission.newBuilder() to construct.
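Before the generated internals of Permission, a minimal client-side sketch of building and round-tripping this message, assuming only the generated methods declared in this file plus the standard com.google.protobuf runtime; the demo class name and the table value "t1" are invented for illustration:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission;

    // Hypothetical demo, not part of this patch.
    public class PermissionRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a READ+WRITE grant scoped to an illustrative table name.
        Permission perm = Permission.newBuilder()
            .addAction(Permission.Action.READ)
            .addAction(Permission.Action.WRITE)
            .setTable(ByteString.copyFromUtf8("t1"))
            .build();

        // Every field of Permission is optional or repeated, so isInitialized()
        // is always true and the round trip cannot fail the required-field check.
        Permission copy = Permission.parseFrom(perm.toByteString());
        System.out.println(copy.getActionList() + " on " + copy.getTable().toStringUtf8());
      }
    }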
+ private Permission(Builder builder) { + super(builder); + } + private Permission(boolean noInit) {} + + private static final Permission defaultInstance; + public static Permission getDefaultInstance() { + return defaultInstance; + } + + public Permission getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable; + } + + public enum Action + implements com.google.protobuf.ProtocolMessageEnum { + READ(0, 0), + WRITE(1, 1), + EXEC(2, 2), + CREATE(3, 3), + ADMIN(4, 4), + ; + + public static final int READ_VALUE = 0; + public static final int WRITE_VALUE = 1; + public static final int EXEC_VALUE = 2; + public static final int CREATE_VALUE = 3; + public static final int ADMIN_VALUE = 4; + + + public final int getNumber() { return value; } + + public static Action valueOf(int value) { + switch (value) { + case 0: return READ; + case 1: return WRITE; + case 2: return EXEC; + case 3: return CREATE; + case 4: return ADMIN; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Action findValueByNumber(int number) { + return Action.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor().getEnumTypes().get(0); + } + + private static final Action[] VALUES = { + READ, WRITE, EXEC, CREATE, ADMIN, + }; + + public static Action valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private Action(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Permission.Action) + } + + private int bitField0_; + // repeated .Permission.Action action = 1; + public static final int ACTION_FIELD_NUMBER = 1; + private java.util.List action_; + public java.util.List getActionList() { + return action_; + } + public int getActionCount() { + return action_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) { + return action_.get(index); + } + + // optional bytes table = 2; + public static final int TABLE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString table_; + public boolean hasTable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTable() { + return table_; + } + + // optional bytes family = 3; + public static 
final int FAMILY_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // optional bytes qualifier = 4; + public static final int QUALIFIER_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + private void initFields() { + action_ = java.util.Collections.emptyList(); + table_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < action_.size(); i++) { + output.writeEnum(1, action_.get(i).getNumber()); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(2, table_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(3, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(4, qualifier_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < action_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(action_.get(i).getNumber()); + } + size += dataSize; + size += 1 * action_.size(); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, table_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, qualifier_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) obj; + + boolean result = true; + result = result && getActionList() + .equals(other.getActionList()); + result = result && (hasTable() == other.hasTable()); + if (hasTable()) { + result = result && getTable() + .equals(other.getTable()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + 
result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getActionCount() > 0) { + hash = (37 * hash) + ACTION_FIELD_NUMBER; + hash = (53 * hash) + hashEnumList(getActionList()); + } + if (hasTable()) { + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + table_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + 
return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + action_ = java.util.Collections.unmodifiableList(action_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.action_ = action_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.table_ = table_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) return this; + if (!other.action_.isEmpty()) { + if (action_.isEmpty()) { + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureActionIsMutable(); + action_.addAll(other.action_); + } + onChanged(); + } + if (other.hasTable()) { + setTable(other.getTable()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addAction(value); + } + break; + } + case 10: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addAction(value); 
+ } + } + input.popLimit(oldLimit); + break; + } + case 18: { + bitField0_ |= 0x00000002; + table_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + family_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + qualifier_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Permission.Action action = 1; + private java.util.List action_ = + java.util.Collections.emptyList(); + private void ensureActionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + action_ = new java.util.ArrayList(action_); + bitField0_ |= 0x00000001; + } + } + public java.util.List getActionList() { + return java.util.Collections.unmodifiableList(action_); + } + public int getActionCount() { + return action_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) { + return action_.get(index); + } + public Builder setAction( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.set(index, value); + onChanged(); + return this; + } + public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.add(value); + onChanged(); + return this; + } + public Builder addAllAction( + java.lang.Iterable values) { + ensureActionIsMutable(); + super.addAll(values, action_); + onChanged(); + return this; + } + public Builder clearAction() { + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // optional bytes table = 2; + private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTable() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTable() { + return table_; + } + public Builder setTable(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + table_ = value; + onChanged(); + return this; + } + public Builder clearTable() { + bitField0_ = (bitField0_ & ~0x00000002); + table_ = getDefaultInstance().getTable(); + onChanged(); + return this; + } + + // optional bytes family = 3; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000004); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // optional bytes qualifier = 4; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ 
|= 0x00000008; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000008); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Permission) + } + + static { + defaultInstance = new Permission(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Permission) + } + + public interface UserPermissionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + boolean hasUser(); + com.google.protobuf.ByteString getUser(); + + // required .Permission permission = 2; + boolean hasPermission(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(); + } + public static final class UserPermission extends + com.google.protobuf.GeneratedMessage + implements UserPermissionOrBuilder { + // Use UserPermission.newBuilder() to construct. + private UserPermission(Builder builder) { + super(builder); + } + private UserPermission(boolean noInit) {} + + private static final UserPermission defaultInstance; + public static UserPermission getDefaultInstance() { + return defaultInstance; + } + + public UserPermission getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // required .Permission permission = 2; + public static final int PERMISSION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_; + public boolean hasPermission() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() { + return permission_; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() { + return permission_; + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasPermission()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + if (((bitField0_ & 
0x00000002) == 0x00000002)) { + output.writeMessage(2, permission_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, permission_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && (hasPermission() == other.hasPermission()); + if (hasPermission()) { + result = result && getPermission() + .equals(other.getPermission()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (hasPermission()) { + hash = (37 * hash) + PERMISSION_FIELD_NUMBER; + hash = (53 * hash) + getPermission().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (permissionBuilder_ == null) { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); + } else { + 
permissionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (permissionBuilder_ == null) { + result.permission_ = permission_; + } else { + result.permission_ = permissionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (other.hasPermission()) { + mergePermission(other.getPermission()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + if (!hasPermission()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + 
extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); + if (hasPermission()) { + subBuilder.mergeFrom(getPermission()); + } + input.readMessage(subBuilder, extensionRegistry); + setPermission(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getUser() { + return user_; + } + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // required .Permission permission = 2; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_; + public boolean hasPermission() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() { + if (permissionBuilder_ == null) { + return permission_; + } else { + return permissionBuilder_.getMessage(); + } + } + public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + permission_ = value; + onChanged(); + } else { + permissionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setPermission( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionBuilder_ == null) { + permission_ = builderForValue.build(); + onChanged(); + } else { + permissionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) { + permission_ = + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(permission_).mergeFrom(value).buildPartial(); + } else { + permission_ = value; + } + onChanged(); + } else { + permissionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearPermission() { + if (permissionBuilder_ == null) { + permission_ = 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); + onChanged(); + } else { + permissionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getPermissionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() { + if (permissionBuilder_ != null) { + return permissionBuilder_.getMessageOrBuilder(); + } else { + return permission_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> + getPermissionFieldBuilder() { + if (permissionBuilder_ == null) { + permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>( + permission_, + getParentForChildren(), + isClean()); + permission_ = null; + } + return permissionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:UserPermission) + } + + static { + defaultInstance = new UserPermission(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserPermission) + } + + public interface UserTablePermissionsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .UserTablePermissions.UserPermissions permissions = 1; + java.util.List + getPermissionsList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index); + int getPermissionsCount(); + java.util.List + getPermissionsOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( + int index); + } + public static final class UserTablePermissions extends + com.google.protobuf.GeneratedMessage + implements UserTablePermissionsOrBuilder { + // Use UserTablePermissions.newBuilder() to construct. 
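Before the nested UserTablePermissions plumbing continues, a hedged sketch of how the just-finished UserPermission message's two required fields (user and permission) behave at build time, using only methods generated in this file; the user name "alice" and the demo class are invented:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.UninitializedMessageException;
    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission;
    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission;

    // Hypothetical demo, not part of this patch.
    public class UserPermissionDemo {
      public static void main(String[] args) {
        UserPermission.Builder b = UserPermission.newBuilder()
            .setUser(ByteString.copyFromUtf8("alice"));

        // permission is still unset: isInitialized() is false, and build()
        // throws; buildPartial() would return the incomplete message instead.
        System.out.println(b.isInitialized());  // false
        try {
          b.build();
        } catch (UninitializedMessageException e) {
          b.setPermission(Permission.newBuilder()
              .addAction(Permission.Action.ADMIN)
              .build());
        }
        System.out.println(b.build().getPermission().getAction(0));  // ADMIN
      }
    }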
+ private UserTablePermissions(Builder builder) { + super(builder); + } + private UserTablePermissions(boolean noInit) {} + + private static final UserTablePermissions defaultInstance; + public static UserTablePermissions getDefaultInstance() { + return defaultInstance; + } + + public UserTablePermissions getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable; + } + + public interface UserPermissionsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + boolean hasUser(); + com.google.protobuf.ByteString getUser(); + + // repeated .Permission permissions = 2; + java.util.List + getPermissionsList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index); + int getPermissionsCount(); + java.util.List + getPermissionsOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( + int index); + } + public static final class UserPermissions extends + com.google.protobuf.GeneratedMessage + implements UserPermissionsOrBuilder { + // Use UserPermissions.newBuilder() to construct. + private UserPermissions(Builder builder) { + super(builder); + } + private UserPermissions(boolean noInit) {} + + private static final UserPermissions defaultInstance; + public static UserPermissions getDefaultInstance() { + return defaultInstance; + } + + public UserPermissions getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated .Permission permissions = 2; + public static final int PERMISSIONS_FIELD_NUMBER = 2; + private java.util.List permissions_; + public java.util.List getPermissionsList() { + return permissions_; + } + public java.util.List + getPermissionsOrBuilderList() { + return permissions_; + } + public int getPermissionsCount() { + return permissions_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) { + return permissions_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( + int index) { + return permissions_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + permissions_ = java.util.Collections.emptyList(); + } + 
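The nested UserTablePermissions.UserPermissions message being defined here pairs one user with the list of Permission entries granted to that user. A minimal sketch of filling one such entry, assuming only the generated methods declared in this file; the user name "bob" and the demo class are invented:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission;
    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions;

    // Hypothetical demo, not part of this patch.
    public class UserPermissionsEntryDemo {
      public static void main(String[] args) {
        // One user, two granted permissions.
        UserTablePermissions.UserPermissions entry =
            UserTablePermissions.UserPermissions.newBuilder()
                .setUser(ByteString.copyFromUtf8("bob"))
                .addPermissions(Permission.newBuilder()
                    .addAction(Permission.Action.READ).build())
                .addPermissions(Permission.newBuilder()
                    .addAction(Permission.Action.CREATE).build())
                .build();
        System.out.println(entry.getPermissionsCount());  // 2
      }
    }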
private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < permissions_.size(); i++) { + output.writeMessage(2, permissions_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + for (int i = 0; i < permissions_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, permissions_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getPermissionsList() + .equals(other.getPermissionsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getPermissionsCount() > 0) { + hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER; + hash = (53 * hash) + getPermissionsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (permissionsBuilder_ == null) { + permissions_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + permissionsBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (permissionsBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + permissions_ = java.util.Collections.unmodifiableList(permissions_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.permissions_ = permissions_; + } else { + result.permissions_ = permissionsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (permissionsBuilder_ == null) { + if (!other.permissions_.isEmpty()) { + if (permissions_.isEmpty()) { + permissions_ = other.permissions_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensurePermissionsIsMutable(); + permissions_.addAll(other.permissions_); + } + onChanged(); + } + } else { + if (!other.permissions_.isEmpty()) { + if (permissionsBuilder_.isEmpty()) { + permissionsBuilder_.dispose(); + permissionsBuilder_ = null; + permissions_ = other.permissions_; + bitField0_ = (bitField0_ & ~0x00000002); + permissionsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getPermissionsFieldBuilder() : null; + } else { + permissionsBuilder_.addAllMessages(other.permissions_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addPermissions(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getUser() { + return user_; + } + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated .Permission permissions = 2; + private java.util.List permissions_ = + java.util.Collections.emptyList(); + private void ensurePermissionsIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + permissions_ = new java.util.ArrayList(permissions_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + 
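// Editorial note, not part of the generated file: the "case 10" and "case 18"
// branches in the hand-rolled mergeFrom(CodedInputStream) loop above are
// protobuf wire-format tags, computed as (field_number << 3) | wire_type,
// where wire type 2 marks a length-delimited value (bytes or an embedded
// message). A minimal sketch checking the two constants used for
// UserPermissions:
/*
int userTag        = (1 << 3) | 2;  // required bytes user = 1            -> tag 10
int permissionsTag = (2 << 3) | 2;  // repeated Permission permissions = 2 -> tag 18
assert userTag == 10 && permissionsTag == 18;
*/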
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionsBuilder_; + + public java.util.List getPermissionsList() { + if (permissionsBuilder_ == null) { + return java.util.Collections.unmodifiableList(permissions_); + } else { + return permissionsBuilder_.getMessageList(); + } + } + public int getPermissionsCount() { + if (permissionsBuilder_ == null) { + return permissions_.size(); + } else { + return permissionsBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) { + if (permissionsBuilder_ == null) { + return permissions_.get(index); + } else { + return permissionsBuilder_.getMessage(index); + } + } + public Builder setPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionsIsMutable(); + permissions_.set(index, value); + onChanged(); + } else { + permissionsBuilder_.setMessage(index, value); + } + return this; + } + public Builder setPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.set(index, builderForValue.build()); + onChanged(); + } else { + permissionsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionsIsMutable(); + permissions_.add(value); + onChanged(); + } else { + permissionsBuilder_.addMessage(value); + } + return this; + } + public Builder addPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionsIsMutable(); + permissions_.add(index, value); + onChanged(); + } else { + permissionsBuilder_.addMessage(index, value); + } + return this; + } + public Builder addPermissions( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.add(builderForValue.build()); + onChanged(); + } else { + permissionsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.add(index, builderForValue.build()); + onChanged(); + } else { + permissionsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllPermissions( + java.lang.Iterable values) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + super.addAll(values, permissions_); + onChanged(); + } else { + permissionsBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearPermissions() { + if (permissionsBuilder_ == null) { + permissions_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + permissionsBuilder_.clear(); + } + return this; + } + public Builder removePermissions(int index) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.remove(index); + onChanged(); + } else { + permissionsBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionsBuilder( + int index) { + return getPermissionsFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( + int index) { + if (permissionsBuilder_ == null) { + return permissions_.get(index); } else { + return permissionsBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getPermissionsOrBuilderList() { + if (permissionsBuilder_ != null) { + return permissionsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(permissions_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder() { + return getPermissionsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder( + int index) { + return getPermissionsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); + } + public java.util.List + getPermissionsBuilderList() { + return getPermissionsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> + getPermissionsFieldBuilder() { + if (permissionsBuilder_ == null) { + permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>( + permissions_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + permissions_ = null; + } + return permissionsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:UserTablePermissions.UserPermissions) + } + + static { + defaultInstance = new UserPermissions(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserTablePermissions.UserPermissions) + } + + // repeated .UserTablePermissions.UserPermissions permissions = 1; + public static final int PERMISSIONS_FIELD_NUMBER = 1; + private java.util.List permissions_; + public java.util.List getPermissionsList() { + return permissions_; + } + public java.util.List + getPermissionsOrBuilderList() { + return permissions_; + } + public int getPermissionsCount() { + return permissions_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) { + return permissions_.get(index); + } + public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( + int index) { + return permissions_.get(index); + } + + private void initFields() { + permissions_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getPermissionsCount(); i++) { + if (!getPermissions(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < permissions_.size(); i++) { + output.writeMessage(1, permissions_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < permissions_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, permissions_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) obj; + + boolean result = true; + result = result && getPermissionsList() + .equals(other.getPermissionsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getPermissionsCount() > 0) { + hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER; + hash = (53 * hash) + getPermissionsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (permissionsBuilder_ == null) { + permissions_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + permissionsBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions(this); + int from_bitField0_ = bitField0_; + if (permissionsBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + permissions_ = java.util.Collections.unmodifiableList(permissions_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.permissions_ = permissions_; + } else { + result.permissions_ = permissionsBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance()) return this; + if (permissionsBuilder_ == null) { + if (!other.permissions_.isEmpty()) { + if (permissions_.isEmpty()) { + permissions_ = other.permissions_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePermissionsIsMutable(); + permissions_.addAll(other.permissions_); + } + onChanged(); + } + } else { + if (!other.permissions_.isEmpty()) { + if (permissionsBuilder_.isEmpty()) { + permissionsBuilder_.dispose(); + permissionsBuilder_ = null; + permissions_ = other.permissions_; + bitField0_ = (bitField0_ & ~0x00000001); + 
permissionsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getPermissionsFieldBuilder() : null; + } else { + permissionsBuilder_.addAllMessages(other.permissions_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getPermissionsCount(); i++) { + if (!getPermissions(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addPermissions(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .UserTablePermissions.UserPermissions permissions = 1; + private java.util.List permissions_ = + java.util.Collections.emptyList(); + private void ensurePermissionsIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + permissions_ = new java.util.ArrayList(permissions_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder> permissionsBuilder_; + + public java.util.List getPermissionsList() { + if (permissionsBuilder_ == null) { + return java.util.Collections.unmodifiableList(permissions_); + } else { + return permissionsBuilder_.getMessageList(); + } + } + public int getPermissionsCount() { + if (permissionsBuilder_ == null) { + return permissions_.size(); + } else { + return permissionsBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) { + if (permissionsBuilder_ == null) { + return permissions_.get(index); + } else { + return permissionsBuilder_.getMessage(index); + } + } + public Builder setPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { + if (permissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionsIsMutable(); + permissions_.set(index, value); + onChanged(); + } else { + permissionsBuilder_.setMessage(index, value); + } + return this; + } + public Builder setPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { + if (permissionsBuilder_ == null) { + 
ensurePermissionsIsMutable(); + permissions_.set(index, builderForValue.build()); + onChanged(); + } else { + permissionsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { + if (permissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionsIsMutable(); + permissions_.add(value); + onChanged(); + } else { + permissionsBuilder_.addMessage(value); + } + return this; + } + public Builder addPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { + if (permissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionsIsMutable(); + permissions_.add(index, value); + onChanged(); + } else { + permissionsBuilder_.addMessage(index, value); + } + return this; + } + public Builder addPermissions( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.add(builderForValue.build()); + onChanged(); + } else { + permissionsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addPermissions( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.add(index, builderForValue.build()); + onChanged(); + } else { + permissionsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllPermissions( + java.lang.Iterable values) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + super.addAll(values, permissions_); + onChanged(); + } else { + permissionsBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearPermissions() { + if (permissionsBuilder_ == null) { + permissions_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + permissionsBuilder_.clear(); + } + return this; + } + public Builder removePermissions(int index) { + if (permissionsBuilder_ == null) { + ensurePermissionsIsMutable(); + permissions_.remove(index); + onChanged(); + } else { + permissionsBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder getPermissionsBuilder( + int index) { + return getPermissionsFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( + int index) { + if (permissionsBuilder_ == null) { + return permissions_.get(index); } else { + return permissionsBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getPermissionsOrBuilderList() { + if (permissionsBuilder_ != null) { + return permissionsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(permissions_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder() { + return getPermissionsFieldBuilder().addBuilder( + 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder( + int index) { + return getPermissionsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()); + } + public java.util.List + getPermissionsBuilderList() { + return getPermissionsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder> + getPermissionsFieldBuilder() { + if (permissionsBuilder_ == null) { + permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>( + permissions_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + permissions_ = null; + } + return permissionsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:UserTablePermissions) + } + + static { + defaultInstance = new UserTablePermissions(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserTablePermissions) + } + + public interface GrantRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .UserPermission permission = 1; + boolean hasPermission(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(); + } + public static final class GrantRequest extends + com.google.protobuf.GeneratedMessage + implements GrantRequestOrBuilder { + // Use GrantRequest.newBuilder() to construct. 
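// Editorial usage sketch, not part of the generated file (which must not be
// edited by hand): build one UserPermissions entry, wrap it in the
// UserTablePermissions message completed just above, and round-trip it
// through parseFrom. It assumes Permission.getDefaultInstance() is fully
// initialized (i.e. Permission has no required fields); toByteArray() and
// ByteString.copyFromUtf8() are provided by the protobuf runtime.
/*
UserTablePermissions.UserPermissions entry =
    UserTablePermissions.UserPermissions.newBuilder()
        .setUser(com.google.protobuf.ByteString.copyFromUtf8("alice")) // required
        .addPermissions(Permission.getDefaultInstance())
        .build();
UserTablePermissions all =
    UserTablePermissions.newBuilder().addPermissions(entry).build();
UserTablePermissions parsed = UserTablePermissions.parseFrom(all.toByteArray());
assert parsed.getPermissionsCount() == 1
    && parsed.getPermissions(0).getUser().toStringUtf8().equals("alice");
// The same list can also be populated through the lazily created nested
// builders, e.g. UserTablePermissions.newBuilder().addPermissionsBuilder().
*/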
+ private GrantRequest(Builder builder) { + super(builder); + } + private GrantRequest(boolean noInit) {} + + private static final GrantRequest defaultInstance; + public static GrantRequest getDefaultInstance() { + return defaultInstance; + } + + public GrantRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .UserPermission permission = 1; + public static final int PERMISSION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_; + public boolean hasPermission() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { + return permission_; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { + return permission_; + } + + private void initFields() { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPermission()) { + memoizedIsInitialized = 0; + return false; + } + if (!getPermission().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, permission_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, permission_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) obj; + + boolean result = true; + result = result && (hasPermission() == other.hasPermission()); + if (hasPermission()) { + result = result && getPermission() + .equals(other.getPermission()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; 
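// Editorial note: the generated equals()/hashCode() here implement structural
// value equality, so a freshly built empty message compares equal to the
// shared default instance. buildPartial() is used in this sketch because
// build() enforces the required permission field:
/*
GrantRequest a = GrantRequest.getDefaultInstance();
GrantRequest b = GrantRequest.newBuilder().buildPartial();
assert a.equals(b) && a.hashCode() == b.hashCode();
*/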
+ hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPermission()) { + hash = (37 * hash) + PERMISSION_FIELD_NUMBER; + hash = (53 * hash) + getPermission().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { 
return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (permissionBuilder_ == null) { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + } else { + permissionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (permissionBuilder_ == null) { + result.permission_ = permission_; + } else { + result.permission_ = permissionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message 
other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance()) return this; + if (other.hasPermission()) { + mergePermission(other.getPermission()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPermission()) { + + return false; + } + if (!getPermission().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); + if (hasPermission()) { + subBuilder.mergeFrom(getPermission()); + } + input.readMessage(subBuilder, extensionRegistry); + setPermission(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .UserPermission permission = 1; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; + public boolean hasPermission() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { + if (permissionBuilder_ == null) { + return permission_; + } else { + return permissionBuilder_.getMessage(); + } + } + public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + permission_ = value; + onChanged(); + } else { + permissionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setPermission( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { + if (permissionBuilder_ == null) { + permission_ = builderForValue.build(); + onChanged(); + } else { + permissionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder 
mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) { + permission_ = + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(permission_).mergeFrom(value).buildPartial(); + } else { + permission_ = value; + } + onChanged(); + } else { + permissionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearPermission() { + if (permissionBuilder_ == null) { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + onChanged(); + } else { + permissionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getPermissionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { + if (permissionBuilder_ != null) { + return permissionBuilder_.getMessageOrBuilder(); + } else { + return permission_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> + getPermissionFieldBuilder() { + if (permissionBuilder_ == null) { + permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>( + permission_, + getParentForChildren(), + isClean()); + permission_ = null; + } + return permissionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GrantRequest) + } + + static { + defaultInstance = new GrantRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GrantRequest) + } + + public interface GrantResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GrantResponse extends + com.google.protobuf.GeneratedMessage + implements GrantResponseOrBuilder { + // Use GrantResponse.newBuilder() to construct. 
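// Editorial sketch of the merge semantics implemented in mergePermission()
// above: when a permission is already present, an incoming UserPermission is
// merged into it field by field (singular fields overwritten, repeated fields
// concatenated) rather than replacing it wholesale. UserPermission's own
// fields are declared elsewhere in this file, so the sketch keeps both
// sub-messages partially built:
/*
GrantRequest.Builder gb = GrantRequest.newBuilder()
    .setPermission(UserPermission.newBuilder().buildPartial());
gb.mergePermission(UserPermission.newBuilder().buildPartial());
assert gb.hasPermission();  // still set; contents are the field-wise merge
*/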
+ private GrantResponse(Builder builder) { + super(builder); + } + private GrantResponse(boolean noInit) {} + + private static final GrantResponse defaultInstance; + public static GrantResponse getDefaultInstance() { + return defaultInstance; + } + + public GrantResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + 
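// Editorial usage sketch: GrantResponse carries no fields and in this API
// appears to serve as a bare acknowledgement. The delimited parsers above
// pair with writeDelimitedTo() from the protobuf runtime, and
// parseDelimitedFrom() returns null at end of stream, so a simple read loop
// works (a surrounding method would declare throws java.io.IOException):
/*
java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
GrantResponse.getDefaultInstance().writeDelimitedTo(out);
GrantResponse.getDefaultInstance().writeDelimitedTo(out);
java.io.ByteArrayInputStream in =
    new java.io.ByteArrayInputStream(out.toByteArray());
int n = 0;
while (GrantResponse.parseDelimitedFrom(in) != null) { n++; }
assert n == 2;
*/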
+ public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GrantResponse) + } + + static { + defaultInstance = new GrantResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GrantResponse) + } + + public interface RevokeRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .UserPermission permission = 1; + boolean hasPermission(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(); + } + public static final class RevokeRequest 
extends + com.google.protobuf.GeneratedMessage + implements RevokeRequestOrBuilder { + // Use RevokeRequest.newBuilder() to construct. + private RevokeRequest(Builder builder) { + super(builder); + } + private RevokeRequest(boolean noInit) {} + + private static final RevokeRequest defaultInstance; + public static RevokeRequest getDefaultInstance() { + return defaultInstance; + } + + public RevokeRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .UserPermission permission = 1; + public static final int PERMISSION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_; + public boolean hasPermission() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { + return permission_; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { + return permission_; + } + + private void initFields() { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPermission()) { + memoizedIsInitialized = 0; + return false; + } + if (!getPermission().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, permission_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, permission_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) obj; + + boolean result = true; + result = result && (hasPermission() == other.hasPermission()); + if (hasPermission()) { + result = result && getPermission() + .equals(other.getPermission()); + } + result = result 
&& + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPermission()) { + hash = (37 * hash) + PERMISSION_FIELD_NUMBER; + hash = (53 * hash) + getPermission().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (permissionBuilder_ == null) { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + } else { + permissionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (permissionBuilder_ == null) { + result.permission_ = permission_; 
+ } else { + result.permission_ = permissionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance()) return this; + if (other.hasPermission()) { + mergePermission(other.getPermission()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPermission()) { + + return false; + } + if (!getPermission().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); + if (hasPermission()) { + subBuilder.mergeFrom(getPermission()); + } + input.readMessage(subBuilder, extensionRegistry); + setPermission(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .UserPermission permission = 1; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; + public boolean hasPermission() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { + if (permissionBuilder_ == null) { + return permission_; + } else { + return permissionBuilder_.getMessage(); + } + } + public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + permission_ = value; + onChanged(); + } else { + permissionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setPermission( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { + if (permissionBuilder_ == null) { + permission_ = 
builderForValue.build(); + onChanged(); + } else { + permissionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) { + permission_ = + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(permission_).mergeFrom(value).buildPartial(); + } else { + permission_ = value; + } + onChanged(); + } else { + permissionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearPermission() { + if (permissionBuilder_ == null) { + permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); + onChanged(); + } else { + permissionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getPermissionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { + if (permissionBuilder_ != null) { + return permissionBuilder_.getMessageOrBuilder(); + } else { + return permission_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> + getPermissionFieldBuilder() { + if (permissionBuilder_ == null) { + permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>( + permission_, + getParentForChildren(), + isClean()); + permission_ = null; + } + return permissionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RevokeRequest) + } + + static { + defaultInstance = new RevokeRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RevokeRequest) + } + + public interface RevokeResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class RevokeResponse extends + com.google.protobuf.GeneratedMessage + implements RevokeResponseOrBuilder { + // Use RevokeResponse.newBuilder() to construct. 
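+ // Illustrative round trip (a sketch, not part of the generated file): the
+ // revoke RPC takes a RevokeRequest wrapping a UserPermission and answers with
+ // this empty RevokeResponse. `perm` below is a hypothetical, pre-built
+ // UserPermission instance; all accessors used are defined in this file.
+ //
+ //   RevokeRequest req = RevokeRequest.newBuilder()
+ //       .setPermission(perm)
+ //       .build();
+ //   byte[] wire = req.toByteArray();
+ //   RevokeRequest parsed = RevokeRequest.parseFrom(wire);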
+ private RevokeResponse(Builder builder) { + super(builder); + } + private RevokeResponse(boolean noInit) {} + + private static final RevokeResponse defaultInstance; + public static RevokeResponse getDefaultInstance() { + return defaultInstance; + } + + public RevokeResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return 
this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:RevokeResponse) + } + + static { + defaultInstance = new RevokeResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RevokeResponse) + } + + public interface UserPermissionsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes table = 1; + boolean hasTable(); + com.google.protobuf.ByteString getTable(); + } + public static final class UserPermissionsRequest extends + com.google.protobuf.GeneratedMessage + implements UserPermissionsRequestOrBuilder { + // Use UserPermissionsRequest.newBuilder() to 
construct. + private UserPermissionsRequest(Builder builder) { + super(builder); + } + private UserPermissionsRequest(boolean noInit) {} + + private static final UserPermissionsRequest defaultInstance; + public static UserPermissionsRequest getDefaultInstance() { + return defaultInstance; + } + + public UserPermissionsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes table = 1; + public static final int TABLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString table_; + public boolean hasTable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTable() { + return table_; + } + + private void initFields() { + table_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTable()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, table_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, table_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) obj; + + boolean result = true; + result = result && (hasTable() == other.hasTable()); + if (hasTable()) { + result = result && getTable() + .equals(other.getTable()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTable()) { + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + 
com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + table_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.table_ = table_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest 
other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance()) return this; + if (other.hasTable()) { + setTable(other.getTable()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTable()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + table_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes table = 1; + private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTable() { + return table_; + } + public Builder setTable(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + table_ = value; + onChanged(); + return this; + } + public Builder clearTable() { + bitField0_ = (bitField0_ & ~0x00000001); + table_ = getDefaultInstance().getTable(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UserPermissionsRequest) + } + + static { + defaultInstance = new UserPermissionsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserPermissionsRequest) + } + + public interface UserPermissionsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .UserPermission permission = 1; + java.util.List + getPermissionList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index); + int getPermissionCount(); + java.util.List + getPermissionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder( + int index); + } + public static final class UserPermissionsResponse extends + com.google.protobuf.GeneratedMessage + implements UserPermissionsResponseOrBuilder { + // Use UserPermissionsResponse.newBuilder() to construct. 
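+ // Reconstructed .proto sketch for this request/response pair (field names,
+ // numbers, and labels inferred from the generated accessors, not copied from
+ // the AccessControl.proto source):
+ //
+ //   message UserPermissionsRequest {
+ //     required bytes table = 1;
+ //   }
+ //   message UserPermissionsResponse {
+ //     repeated UserPermission permission = 1;
+ //   }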
+ private UserPermissionsResponse(Builder builder) { + super(builder); + } + private UserPermissionsResponse(boolean noInit) {} + + private static final UserPermissionsResponse defaultInstance; + public static UserPermissionsResponse getDefaultInstance() { + return defaultInstance; + } + + public UserPermissionsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable; + } + + // repeated .UserPermission permission = 1; + public static final int PERMISSION_FIELD_NUMBER = 1; + private java.util.List permission_; + public java.util.List getPermissionList() { + return permission_; + } + public java.util.List + getPermissionOrBuilderList() { + return permission_; + } + public int getPermissionCount() { + return permission_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) { + return permission_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder( + int index) { + return permission_.get(index); + } + + private void initFields() { + permission_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getPermissionCount(); i++) { + if (!getPermission(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < permission_.size(); i++) { + output.writeMessage(1, permission_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < permission_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, permission_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) obj; + + boolean result = true; + result = result && getPermissionList() + .equals(other.getPermissionList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + 
public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getPermissionCount() > 0) { + hash = (37 * hash) + PERMISSION_FIELD_NUMBER; + hash = (53 * hash) + getPermissionList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (permissionBuilder_ == null) { + permission_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + permissionBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse(this); + int from_bitField0_ = bitField0_; + if (permissionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + permission_ = 
java.util.Collections.unmodifiableList(permission_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.permission_ = permission_; + } else { + result.permission_ = permissionBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()) return this; + if (permissionBuilder_ == null) { + if (!other.permission_.isEmpty()) { + if (permission_.isEmpty()) { + permission_ = other.permission_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePermissionIsMutable(); + permission_.addAll(other.permission_); + } + onChanged(); + } + } else { + if (!other.permission_.isEmpty()) { + if (permissionBuilder_.isEmpty()) { + permissionBuilder_.dispose(); + permissionBuilder_ = null; + permission_ = other.permission_; + bitField0_ = (bitField0_ & ~0x00000001); + permissionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getPermissionFieldBuilder() : null; + } else { + permissionBuilder_.addAllMessages(other.permission_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getPermissionCount(); i++) { + if (!getPermission(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addPermission(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .UserPermission permission = 1; + private java.util.List permission_ = + java.util.Collections.emptyList(); + private void ensurePermissionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + permission_ = new java.util.ArrayList(permission_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; + + public java.util.List getPermissionList() { + if (permissionBuilder_ == null) { + 
return java.util.Collections.unmodifiableList(permission_); + } else { + return permissionBuilder_.getMessageList(); + } + } + public int getPermissionCount() { + if (permissionBuilder_ == null) { + return permission_.size(); + } else { + return permissionBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) { + if (permissionBuilder_ == null) { + return permission_.get(index); + } else { + return permissionBuilder_.getMessage(index); + } + } + public Builder setPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionIsMutable(); + permission_.set(index, value); + onChanged(); + } else { + permissionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.set(index, builderForValue.build()); + onChanged(); + } else { + permissionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionIsMutable(); + permission_.add(value); + onChanged(); + } else { + permissionBuilder_.addMessage(value); + } + return this; + } + public Builder addPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionIsMutable(); + permission_.add(index, value); + onChanged(); + } else { + permissionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addPermission( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.add(builderForValue.build()); + onChanged(); + } else { + permissionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.add(index, builderForValue.build()); + onChanged(); + } else { + permissionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllPermission( + java.lang.Iterable values) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + super.addAll(values, permission_); + onChanged(); + } else { + permissionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearPermission() { + if (permissionBuilder_ == null) { + permission_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + permissionBuilder_.clear(); + } + return this; + } + public Builder removePermission(int index) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.remove(index); + onChanged(); + } else { + permissionBuilder_.remove(index); + } + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder( + int index) { + return getPermissionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder( + int index) { + if (permissionBuilder_ == null) { + return permission_.get(index); } else { + return permissionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getPermissionOrBuilderList() { + if (permissionBuilder_ != null) { + return permissionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(permission_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder() { + return getPermissionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder( + int index) { + return getPermissionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()); + } + public java.util.List + getPermissionBuilderList() { + return getPermissionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> + getPermissionFieldBuilder() { + if (permissionBuilder_ == null) { + permissionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>( + permission_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + permission_ = null; + } + return permissionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:UserPermissionsResponse) + } + + static { + defaultInstance = new UserPermissionsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserPermissionsResponse) + } + + public interface CheckPermissionsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Permission permission = 1; + java.util.List + getPermissionList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index); + int getPermissionCount(); + java.util.List + getPermissionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( + int index); + } + public static final class CheckPermissionsRequest extends + com.google.protobuf.GeneratedMessage + implements CheckPermissionsRequestOrBuilder { + // Use CheckPermissionsRequest.newBuilder() to construct. 
+ private CheckPermissionsRequest(Builder builder) { + super(builder); + } + private CheckPermissionsRequest(boolean noInit) {} + + private static final CheckPermissionsRequest defaultInstance; + public static CheckPermissionsRequest getDefaultInstance() { + return defaultInstance; + } + + public CheckPermissionsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable; + } + + // repeated .Permission permission = 1; + public static final int PERMISSION_FIELD_NUMBER = 1; + private java.util.List permission_; + public java.util.List getPermissionList() { + return permission_; + } + public java.util.List + getPermissionOrBuilderList() { + return permission_; + } + public int getPermissionCount() { + return permission_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) { + return permission_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( + int index) { + return permission_.get(index); + } + + private void initFields() { + permission_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < permission_.size(); i++) { + output.writeMessage(1, permission_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < permission_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, permission_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) obj; + + boolean result = true; + result = result && getPermissionList() + .equals(other.getPermissionList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getPermissionCount() > 0) { + hash = (37 * hash) + 
PERMISSION_FIELD_NUMBER; + hash = (53 * hash) + getPermissionList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public 
Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPermissionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (permissionBuilder_ == null) { + permission_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + permissionBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest(this); + int from_bitField0_ = bitField0_; + if (permissionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + permission_ = java.util.Collections.unmodifiableList(permission_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.permission_ = permission_; + } else { + result.permission_ 
= permissionBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance()) return this; + if (permissionBuilder_ == null) { + if (!other.permission_.isEmpty()) { + if (permission_.isEmpty()) { + permission_ = other.permission_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePermissionIsMutable(); + permission_.addAll(other.permission_); + } + onChanged(); + } + } else { + if (!other.permission_.isEmpty()) { + if (permissionBuilder_.isEmpty()) { + permissionBuilder_.dispose(); + permissionBuilder_ = null; + permission_ = other.permission_; + bitField0_ = (bitField0_ & ~0x00000001); + permissionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getPermissionFieldBuilder() : null; + } else { + permissionBuilder_.addAllMessages(other.permission_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addPermission(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Permission permission = 1; + private java.util.List permission_ = + java.util.Collections.emptyList(); + private void ensurePermissionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + permission_ = new java.util.ArrayList(permission_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_; + + public java.util.List getPermissionList() { + if (permissionBuilder_ == null) { + return java.util.Collections.unmodifiableList(permission_); + } else { + return permissionBuilder_.getMessageList(); + } + } + public int getPermissionCount() { + if (permissionBuilder_ == null) { + return permission_.size(); + } else { + return permissionBuilder_.getCount(); + } + } + public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) { + if (permissionBuilder_ == null) { + return permission_.get(index); + } else { + return permissionBuilder_.getMessage(index); + } + } + public Builder setPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionIsMutable(); + permission_.set(index, value); + onChanged(); + } else { + permissionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.set(index, builderForValue.build()); + onChanged(); + } else { + permissionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionIsMutable(); + permission_.add(value); + onChanged(); + } else { + permissionBuilder_.addMessage(value); + } + return this; + } + public Builder addPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { + if (permissionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePermissionIsMutable(); + permission_.add(index, value); + onChanged(); + } else { + permissionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addPermission( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.add(builderForValue.build()); + onChanged(); + } else { + permissionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addPermission( + int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.add(index, builderForValue.build()); + onChanged(); + } else { + permissionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllPermission( + java.lang.Iterable values) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + super.addAll(values, permission_); + onChanged(); + } else { + permissionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearPermission() { + if (permissionBuilder_ == null) { + permission_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + permissionBuilder_.clear(); + } + return this; + } + public Builder removePermission(int index) { + if (permissionBuilder_ == null) { + ensurePermissionIsMutable(); + permission_.remove(index); + onChanged(); + } else { + permissionBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder( + int index) { + return getPermissionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( + int index) { + if 
(permissionBuilder_ == null) { + return permission_.get(index); } else { + return permissionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getPermissionOrBuilderList() { + if (permissionBuilder_ != null) { + return permissionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(permission_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder() { + return getPermissionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder( + int index) { + return getPermissionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); + } + public java.util.List + getPermissionBuilderList() { + return getPermissionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> + getPermissionFieldBuilder() { + if (permissionBuilder_ == null) { + permissionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>( + permission_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + permission_ = null; + } + return permissionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:CheckPermissionsRequest) + } + + static { + defaultInstance = new CheckPermissionsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CheckPermissionsRequest) + } + + public interface CheckPermissionsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class CheckPermissionsResponse extends + com.google.protobuf.GeneratedMessage + implements CheckPermissionsResponseOrBuilder { + // Use CheckPermissionsResponse.newBuilder() to construct. 
+ private CheckPermissionsResponse(Builder builder) { + super(builder); + } + private CheckPermissionsResponse(boolean noInit) {} + + private static final CheckPermissionsResponse defaultInstance; + public static CheckPermissionsResponse getDefaultInstance() { + return defaultInstance; + } + + public CheckPermissionsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + 
maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CheckPermissionsResponse) + } + + static { + defaultInstance = new CheckPermissionsResponse(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:CheckPermissionsResponse) + } + + public static abstract class AccessControlService + implements com.google.protobuf.Service { + protected AccessControlService() {} + + public interface Interface { + public abstract void grant( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void revoke( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getUserPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void checkPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new AccessControlService() { + @java.lang.Override + public void grant( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, + com.google.protobuf.RpcCallback done) { + impl.grant(controller, request, done); + } + + @java.lang.Override + public void revoke( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, + com.google.protobuf.RpcCallback done) { + impl.revoke(controller, request, done); + } + + @java.lang.Override + public void getUserPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, + com.google.protobuf.RpcCallback done) { + impl.getUserPermissions(controller, request, done); + } + + @java.lang.Override + public void checkPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, + com.google.protobuf.RpcCallback done) { + impl.checkPermissions(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.grant(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)request); + case 1: + return impl.revoke(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request); + case 2: + return impl.getUserPermissions(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request); + case 3: + return impl.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void grant( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void revoke( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getUserPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void checkPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message 
request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.grant(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.revoke(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void grant( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance())); + } + + public void revoke( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance())); + } + + public void getUserPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance())); + } + + public void checkPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Permission_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Permission_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserPermission_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserPermission_fieldAccessorTable; + private 
static com.google.protobuf.Descriptors.Descriptor + internal_static_UserTablePermissions_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserTablePermissions_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserTablePermissions_UserPermissions_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GrantRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GrantRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GrantResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GrantResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RevokeRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RevokeRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RevokeResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RevokeResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserPermissionsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserPermissionsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserPermissionsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserPermissionsResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CheckPermissionsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CheckPermissionsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CheckPermissionsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CheckPermissionsResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\023AccessControl.proto\"\242\001\n\nPermission\022\"\n\006" + + "action\030\001 \003(\0162\022.Permission.Action\022\r\n\005tabl" + + "e\030\002 \001(\014\022\016\n\006family\030\003 \001(\014\022\021\n\tqualifier\030\004 \001" + + "(\014\">\n\006Action\022\010\n\004READ\020\000\022\t\n\005WRITE\020\001\022\010\n\004EXE" + + "C\020\002\022\n\n\006CREATE\020\003\022\t\n\005ADMIN\020\004\"?\n\016UserPermis" + + "sion\022\014\n\004user\030\001 \002(\014\022\037\n\npermission\030\002 \002(\0132\013" + + ".Permission\"\225\001\n\024UserTablePermissions\022:\n\013" + + "permissions\030\001 \003(\0132%.UserTablePermissions" + + ".UserPermissions\032A\n\017UserPermissions\022\014\n\004u" + + "ser\030\001 \002(\014\022 \n\013permissions\030\002 \003(\0132\013.Permiss", + "ion\"3\n\014GrantRequest\022#\n\npermission\030\001 \002(\0132" + + 
"\017.UserPermission\"\017\n\rGrantResponse\"4\n\rRev" + + "okeRequest\022#\n\npermission\030\001 \002(\0132\017.UserPer" + + "mission\"\020\n\016RevokeResponse\"\'\n\026UserPermiss" + + "ionsRequest\022\r\n\005table\030\001 \002(\014\">\n\027UserPermis" + + "sionsResponse\022#\n\npermission\030\001 \003(\0132\017.User" + + "Permission\":\n\027CheckPermissionsRequest\022\037\n" + + "\npermission\030\001 \003(\0132\013.Permission\"\032\n\030CheckP" + + "ermissionsResponse2\373\001\n\024AccessControlServ" + + "ice\022&\n\005grant\022\r.GrantRequest\032\016.GrantRespo", + "nse\022)\n\006revoke\022\016.RevokeRequest\032\017.RevokeRe" + + "sponse\022G\n\022getUserPermissions\022\027.UserPermi" + + "ssionsRequest\032\030.UserPermissionsResponse\022" + + "G\n\020checkPermissions\022\030.CheckPermissionsRe" + + "quest\032\031.CheckPermissionsResponseBI\n*org." + + "apache.hadoop.hbase.protobuf.generatedB\023" + + "AccessControlProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_Permission_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Permission_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Permission_descriptor, + new java.lang.String[] { "Action", "Table", "Family", "Qualifier", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder.class); + internal_static_UserPermission_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_UserPermission_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserPermission_descriptor, + new java.lang.String[] { "User", "Permission", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder.class); + internal_static_UserTablePermissions_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_UserTablePermissions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserTablePermissions_descriptor, + new java.lang.String[] { "Permissions", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.Builder.class); + internal_static_UserTablePermissions_UserPermissions_descriptor = + internal_static_UserTablePermissions_descriptor.getNestedTypes().get(0); + internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserTablePermissions_UserPermissions_descriptor, + new java.lang.String[] { "User", "Permissions", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder.class); + internal_static_GrantRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GrantRequest_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GrantRequest_descriptor, + new java.lang.String[] { "Permission", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.Builder.class); + internal_static_GrantResponse_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GrantResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GrantResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.Builder.class); + internal_static_RevokeRequest_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_RevokeRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RevokeRequest_descriptor, + new java.lang.String[] { "Permission", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.Builder.class); + internal_static_RevokeResponse_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_RevokeResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RevokeResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.Builder.class); + internal_static_UserPermissionsRequest_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_UserPermissionsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserPermissionsRequest_descriptor, + new java.lang.String[] { "Table", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class); + internal_static_UserPermissionsResponse_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_UserPermissionsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserPermissionsResponse_descriptor, + new java.lang.String[] { "Permission", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class); + internal_static_CheckPermissionsRequest_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_CheckPermissionsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CheckPermissionsRequest_descriptor, + new java.lang.String[] { "Permission", }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.Builder.class); + internal_static_CheckPermissionsResponse_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_CheckPermissionsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_CheckPermissionsResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java new file mode 100644 index 0000000..73434c1 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java @@ -0,0 +1,16766 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Admin.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class AdminProtos { + private AdminProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface GetRegionInfoRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bool compactionState = 2; + boolean hasCompactionState(); + boolean getCompactionState(); + } + public static final class GetRegionInfoRequest extends + com.google.protobuf.GeneratedMessage + implements GetRegionInfoRequestOrBuilder { + // Use GetRegionInfoRequest.newBuilder() to construct. 
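+ // A brief usage sketch (an illustration, not protoc output): the request wraps a
+ // required RegionSpecifier plus an optional compactionState flag. The region name
+ // below is a hypothetical placeholder, and RegionSpecifier's type/value accessors
+ // are assumed from the HBase.proto definitions this file depends on:
+ //
+ //   HBaseProtos.RegionSpecifier region = HBaseProtos.RegionSpecifier.newBuilder()
+ //       .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
+ //       .setValue(com.google.protobuf.ByteString.copyFromUtf8("exampleRegion"))
+ //       .build();
+ //   GetRegionInfoRequest request = GetRegionInfoRequest.newBuilder()
+ //       .setRegion(region)
+ //       .setCompactionState(true)   // also ask the server for compaction state
+ //       .build();
+ //   byte[] wire = request.toByteArray();
+ //   GetRegionInfoRequest parsed = GetRegionInfoRequest.parseFrom(wire);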
+ private GetRegionInfoRequest(Builder builder) { + super(builder); + } + private GetRegionInfoRequest(boolean noInit) {} + + private static final GetRegionInfoRequest defaultInstance; + public static GetRegionInfoRequest getDefaultInstance() { + return defaultInstance; + } + + public GetRegionInfoRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bool compactionState = 2; + public static final int COMPACTIONSTATE_FIELD_NUMBER = 2; + private boolean compactionState_; + public boolean hasCompactionState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getCompactionState() { + return compactionState_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + compactionState_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, compactionState_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, compactionState_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) { + return 
super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasCompactionState() == other.hasCompactionState()); + if (hasCompactionState()) { + result = result && (getCompactionState() + == other.getCompactionState()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasCompactionState()) { + hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCompactionState()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + compactionState_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.compactionState_ = compactionState_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasCompactionState()) { + setCompactionState(other.getCompactionState()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + compactionState_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bool compactionState = 2; + private boolean compactionState_ ; + public boolean hasCompactionState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getCompactionState() { + return compactionState_; + } + public Builder setCompactionState(boolean value) { + bitField0_ |= 0x00000002; + compactionState_ = value; + onChanged(); + return this; + } + public Builder clearCompactionState() { + bitField0_ = (bitField0_ 
& ~0x00000002); + compactionState_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) + } + + static { + defaultInstance = new GetRegionInfoRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) + } + + public interface GetRegionInfoResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionInfo regionInfo = 1; + boolean hasRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); + + // optional .GetRegionInfoResponse.CompactionState compactionState = 2; + boolean hasCompactionState(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState(); + } + public static final class GetRegionInfoResponse extends + com.google.protobuf.GeneratedMessage + implements GetRegionInfoResponseOrBuilder { + // Use GetRegionInfoResponse.newBuilder() to construct. + private GetRegionInfoResponse(Builder builder) { + super(builder); + } + private GetRegionInfoResponse(boolean noInit) {} + + private static final GetRegionInfoResponse defaultInstance; + public static GetRegionInfoResponse getDefaultInstance() { + return defaultInstance; + } + + public GetRegionInfoResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + } + + public enum CompactionState + implements com.google.protobuf.ProtocolMessageEnum { + NONE(0, 0), + MINOR(1, 1), + MAJOR(2, 2), + MAJOR_AND_MINOR(3, 3), + ; + + public static final int NONE_VALUE = 0; + public static final int MINOR_VALUE = 1; + public static final int MAJOR_VALUE = 2; + public static final int MAJOR_AND_MINOR_VALUE = 3; + + + public final int getNumber() { return value; } + + public static CompactionState valueOf(int value) { + switch (value) { + case 0: return NONE; + case 1: return MINOR; + case 2: return MAJOR; + case 3: return MAJOR_AND_MINOR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public CompactionState findValueByNumber(int number) { + return CompactionState.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0); + } + + private static final CompactionState[] VALUES = { + NONE, MINOR, MAJOR, MAJOR_AND_MINOR, + }; + + public static CompactionState valueOf( + 
com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private CompactionState(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:GetRegionInfoResponse.CompactionState) + } + + private int bitField0_; + // required .RegionInfo regionInfo = 1; + public static final int REGIONINFO_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + return regionInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + return regionInfo_; + } + + // optional .GetRegionInfoResponse.CompactionState compactionState = 2; + public static final int COMPACTIONSTATE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_; + public boolean hasCompactionState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { + return compactionState_; + } + + private void initFields() { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionInfo()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegionInfo().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, regionInfo_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, compactionState_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionInfo_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, compactionState_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj; + + boolean result = true; + result = result && (hasRegionInfo() == other.hasRegionInfo()); + if (hasRegionInfo()) { + result = result && getRegionInfo() + .equals(other.getRegionInfo()); + } + result = result && (hasCompactionState() == other.hasCompactionState()); + if (hasCompactionState()) { + result = result && + (getCompactionState() == other.getCompactionState()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionInfo()) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfo().hashCode(); + } + if (hasCompactionState()) { + hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getCompactionState()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, 
extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionInfoBuilder_ == null) { + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.compactionState_ = compactionState_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; + if (other.hasRegionInfo()) { + mergeRegionInfo(other.getRegionInfo()); + } + if (other.hasCompactionState()) { + setCompactionState(other.getCompactionState()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionInfo()) { + + return false; + } + if (!getRegionInfo().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + if (hasRegionInfo()) { + subBuilder.mergeFrom(getRegionInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegionInfo(subBuilder.buildPartial()); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue); + if (value == null) { + 
unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + compactionState_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required .RegionInfo regionInfo = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + if (regionInfoBuilder_ == null) { + return regionInfo_; + } else { + return regionInfoBuilder_.getMessage(); + } + } + public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + regionInfo_ = value; + onChanged(); + } else { + regionInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + regionInfo_ = builderForValue.build(); + onChanged(); + } else { + regionInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { + regionInfo_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); + } else { + regionInfo_ = value; + } + onChanged(); + } else { + regionInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilder(); + } else { + return regionInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // optional .GetRegionInfoResponse.CompactionState compactionState = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + public boolean hasCompactionState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { + return compactionState_; + } + public Builder setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + compactionState_ = value; + onChanged(); + return this; + } + public Builder clearCompactionState() { + bitField0_ = (bitField0_ & ~0x00000002); + compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) + } + + static { + defaultInstance = new GetRegionInfoResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) + } + + public interface GetStoreFileRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated bytes family = 2; + java.util.List getFamilyList(); + int getFamilyCount(); + com.google.protobuf.ByteString getFamily(int index); + } + public static final class GetStoreFileRequest extends + com.google.protobuf.GeneratedMessage + implements GetStoreFileRequestOrBuilder { + // Use GetStoreFileRequest.newBuilder() to construct. 
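+ // A usage sketch along the same lines (an illustration, not protoc output): region
+ // is required, and the repeated "family" field narrows the request to specific
+ // column families; addFamily(...) is the standard generated accessor for a repeated
+ // bytes field. The family name below is a placeholder:
+ //
+ //   GetStoreFileRequest request = GetStoreFileRequest.newBuilder()
+ //       .setRegion(region)  // a RegionSpecifier, built as in the sketch above
+ //       .addFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
+ //       .build();
+ //
+ // Omitting family appears to request store files across all column families of
+ // the region.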
+ private GetStoreFileRequest(Builder builder) { + super(builder); + } + private GetStoreFileRequest(boolean noInit) {} + + private static final GetStoreFileRequest defaultInstance; + public static GetStoreFileRequest getDefaultInstance() { + return defaultInstance; + } + + public GetStoreFileRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated bytes family = 2; + public static final int FAMILY_FIELD_NUMBER = 2; + private java.util.List family_; + public java.util.List + getFamilyList() { + return family_; + } + public int getFamilyCount() { + return family_.size(); + } + public com.google.protobuf.ByteString getFamily(int index) { + return family_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + family_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < family_.size(); i++) { + output.writeBytes(2, family_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + { + int dataSize = 0; + for (int i = 0; i < family_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(family_.get(i)); + } + size += dataSize; + size += 1 * getFamilyList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + 
} + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getFamilyList() + .equals(other.getFamilyList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getFamilyCount() > 0) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamilyList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + family_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + family_ = java.util.Collections.unmodifiableList(family_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.family_ = family_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (!other.family_.isEmpty()) { + if (family_.isEmpty()) { + family_ = other.family_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyIsMutable(); + family_.addAll(other.family_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + ensureFamilyIsMutable(); + family_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< 
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated bytes family = 2; + private java.util.List family_ = java.util.Collections.emptyList();; + private void ensureFamilyIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + family_ = new java.util.ArrayList(family_); + bitField0_ |= 0x00000002; + } + } + public java.util.List 
+ getFamilyList() { + return java.util.Collections.unmodifiableList(family_); + } + public int getFamilyCount() { + return family_.size(); + } + public com.google.protobuf.ByteString getFamily(int index) { + return family_.get(index); + } + public Builder setFamily( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyIsMutable(); + family_.set(index, value); + onChanged(); + return this; + } + public Builder addFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyIsMutable(); + family_.add(value); + onChanged(); + return this; + } + public Builder addAllFamily( + java.lang.Iterable values) { + ensureFamilyIsMutable(); + super.addAll(values, family_); + onChanged(); + return this; + } + public Builder clearFamily() { + family_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetStoreFileRequest) + } + + static { + defaultInstance = new GetStoreFileRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetStoreFileRequest) + } + + public interface GetStoreFileResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string storeFile = 1; + java.util.List getStoreFileList(); + int getStoreFileCount(); + String getStoreFile(int index); + } + public static final class GetStoreFileResponse extends + com.google.protobuf.GeneratedMessage + implements GetStoreFileResponseOrBuilder { + // Use GetStoreFileResponse.newBuilder() to construct. + private GetStoreFileResponse(Builder builder) { + super(builder); + } + private GetStoreFileResponse(boolean noInit) {} + + private static final GetStoreFileResponse defaultInstance; + public static GetStoreFileResponse getDefaultInstance() { + return defaultInstance; + } + + public GetStoreFileResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable; + } + + // repeated string storeFile = 1; + public static final int STOREFILE_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList storeFile_; + public java.util.List + getStoreFileList() { + return storeFile_; + } + public int getStoreFileCount() { + return storeFile_.size(); + } + public String getStoreFile(int index) { + return storeFile_.get(index); + } + + private void initFields() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < storeFile_.size(); i++) { + output.writeBytes(1, storeFile_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = 
memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < storeFile_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(storeFile_.getByteString(i)); + } + size += dataSize; + size += 1 * getStoreFileList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) obj; + + boolean result = true; + result = result && getStoreFileList() + .equals(other.getStoreFileList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getStoreFileCount() > 0) { + hash = (37 * hash) + STOREFILE_FIELD_NUMBER; + hash = (53 * hash) + getStoreFileList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + 
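+ // mergeDelimitedFrom() returns false on a clean end-of-stream, so the null
+ // below means "no more messages" — callers can read length-delimited
+ // responses from the same InputStream in a loop until null is returned.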
return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( + storeFile_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.storeFile_ = storeFile_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()) return this; + if (!other.storeFile_.isEmpty()) { + if (storeFile_.isEmpty()) { + storeFile_ = other.storeFile_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureStoreFileIsMutable(); + storeFile_.addAll(other.storeFile_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureStoreFileIsMutable(); + storeFile_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated string storeFile = 1; + private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureStoreFileIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getStoreFileList() { + return java.util.Collections.unmodifiableList(storeFile_); + } + public int getStoreFileCount() { + return storeFile_.size(); + } + public String getStoreFile(int index) { + return storeFile_.get(index); + } + public Builder setStoreFile( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.set(index, 
value); + onChanged(); + return this; + } + public Builder addStoreFile(String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.add(value); + onChanged(); + return this; + } + public Builder addAllStoreFile( + java.lang.Iterable values) { + ensureStoreFileIsMutable(); + super.addAll(values, storeFile_); + onChanged(); + return this; + } + public Builder clearStoreFile() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + void addStoreFile(com.google.protobuf.ByteString value) { + ensureStoreFileIsMutable(); + storeFile_.add(value); + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:GetStoreFileResponse) + } + + static { + defaultInstance = new GetStoreFileResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetStoreFileResponse) + } + + public interface GetOnlineRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GetOnlineRegionRequest extends + com.google.protobuf.GeneratedMessage + implements GetOnlineRegionRequestOrBuilder { + // Use GetOnlineRegionRequest.newBuilder() to construct. + private GetOnlineRegionRequest(Builder builder) { + super(builder); + } + private GetOnlineRegionRequest(boolean noInit) {} + + private static final GetOnlineRegionRequest defaultInstance; + public static GetOnlineRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public GetOnlineRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override 
+ public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected 
Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean 
isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest) + } + + static { + defaultInstance = new GetOnlineRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest) + } + + public interface GetOnlineRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionInfo regionInfo = 1; + java.util.List + getRegionInfoList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index); + int getRegionInfoCount(); + java.util.List + getRegionInfoOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index); + } + public static final class GetOnlineRegionResponse extends + com.google.protobuf.GeneratedMessage + implements GetOnlineRegionResponseOrBuilder { + // Use GetOnlineRegionResponse.newBuilder() to construct. + private GetOnlineRegionResponse(Builder builder) { + super(builder); + } + private GetOnlineRegionResponse(boolean noInit) {} + + private static final GetOnlineRegionResponse defaultInstance; + public static GetOnlineRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public GetOnlineRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + } + + // repeated .RegionInfo regionInfo = 1; + public static final int REGIONINFO_FIELD_NUMBER = 1; + private java.util.List regionInfo_; + public java.util.List getRegionInfoList() { + return regionInfo_; + } + public java.util.List + getRegionInfoOrBuilderList() { + return regionInfo_; + } + public int getRegionInfoCount() { + return regionInfo_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { + return regionInfo_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index) { + return regionInfo_.get(index); + } + + private void initFields() { + regionInfo_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRegionInfoCount(); i++) { + if (!getRegionInfo(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + 
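+ // Every repeated RegionInfo entry passed its own isInitialized() check;
+ // cache the positive result so later calls short-circuit on
+ // memoizedIsInitialized instead of re-walking the list.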
memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < regionInfo_.size(); i++) { + output.writeMessage(1, regionInfo_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < regionInfo_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionInfo_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj; + + boolean result = true; + result = result && getRegionInfoList() + .equals(other.getRegionInfoList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionInfoCount() > 0) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfoList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + regionInfoBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDescriptor(); + } + + 
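+ // Illustrative builder usage (assumed caller code, not generated by protoc);
+ // `info` stands for any fully built HBaseProtos.RegionInfo:
+ //   GetOnlineRegionResponse resp = GetOnlineRegionResponse.newBuilder()
+ //       .addRegionInfo(info)
+ //       .build();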
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this); + int from_bitField0_ = bitField0_; + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; + if (regionInfoBuilder_ == null) { + if (!other.regionInfo_.isEmpty()) { + if (regionInfo_.isEmpty()) { + regionInfo_ = other.regionInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionInfoIsMutable(); + regionInfo_.addAll(other.regionInfo_); + } + onChanged(); + } + } else { + if (!other.regionInfo_.isEmpty()) { + if (regionInfoBuilder_.isEmpty()) { + regionInfoBuilder_.dispose(); + regionInfoBuilder_ = null; + regionInfo_ = other.regionInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + regionInfoBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getRegionInfoFieldBuilder() : null; + } else { + regionInfoBuilder_.addAllMessages(other.regionInfo_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionInfoCount(); i++) { + if (!getRegionInfo(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegionInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .RegionInfo regionInfo = 1; + private java.util.List regionInfo_ = + java.util.Collections.emptyList(); + private void ensureRegionInfoIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionInfo_ = new java.util.ArrayList(regionInfo_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + + public java.util.List getRegionInfoList() { + if (regionInfoBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionInfo_); + } else { + return regionInfoBuilder_.getMessageList(); + } + } + public int getRegionInfoCount() { + if (regionInfoBuilder_ == null) { + return regionInfo_.size(); + } else { + return regionInfoBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { + if (regionInfoBuilder_ == null) { + return regionInfo_.get(index); + } else { + return regionInfoBuilder_.getMessage(index); + } + } + public Builder setRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.set(index, value); + onChanged(); + } else { + regionInfoBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.set(index, builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + 
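+ // Copy-on-write step: ensureRegionInfoIsMutable() has just swapped a possibly
+ // shared immutable list for a private ArrayList, so the add below cannot
+ // mutate a message that was already built from this Builder.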
regionInfo_.add(value); + onChanged(); + } else { + regionInfoBuilder_.addMessage(value); + } + return this; + } + public Builder addRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.add(index, value); + onChanged(); + } else { + regionInfoBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.add(builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.add(index, builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegionInfo( + java.lang.Iterable values) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + super.addAll(values, regionInfo_); + onChanged(); + } else { + regionInfoBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + return this; + } + public Builder removeRegionInfo(int index) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.remove(index); + onChanged(); + } else { + regionInfoBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( + int index) { + return getRegionInfoFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index) { + if (regionInfoBuilder_ == null) { + return regionInfo_.get(index); } else { + return regionInfoBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getRegionInfoOrBuilderList() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(regionInfo_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { + return getRegionInfoFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( + int index) { + return getRegionInfoFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); + } + public java.util.List + getRegionInfoBuilderList() { + return getRegionInfoFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + 
getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse) + } + + static { + defaultInstance = new GetOnlineRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse) + } + + public interface OpenRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + java.util.List + getOpenInfoList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index); + int getOpenInfoCount(); + java.util.List + getOpenInfoOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( + int index); + } + public static final class OpenRegionRequest extends + com.google.protobuf.GeneratedMessage + implements OpenRegionRequestOrBuilder { + // Use OpenRegionRequest.newBuilder() to construct. + private OpenRegionRequest(Builder builder) { + super(builder); + } + private OpenRegionRequest(boolean noInit) {} + + private static final OpenRegionRequest defaultInstance; + public static OpenRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public OpenRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + } + + public interface RegionOpenInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionInfo region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder(); + + // optional uint32 versionOfOfflineNode = 2; + boolean hasVersionOfOfflineNode(); + int getVersionOfOfflineNode(); + } + public static final class RegionOpenInfo extends + com.google.protobuf.GeneratedMessage + implements RegionOpenInfoOrBuilder { + // Use RegionOpenInfo.newBuilder() to construct. 
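+ // Illustrative construction (assumed caller code): region is required,
+ // versionOfOfflineNode is optional (presumably the version of the region's
+ // offline znode in ZooKeeper), e.g.
+ //   RegionOpenInfo info = RegionOpenInfo.newBuilder()
+ //       .setRegion(hri)               // hri: a built HBaseProtos.RegionInfo
+ //       .setVersionOfOfflineNode(v)
+ //       .build();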
+ private RegionOpenInfo(Builder builder) { + super(builder); + } + private RegionOpenInfo(boolean noInit) {} + + private static final RegionOpenInfo defaultInstance; + public static RegionOpenInfo getDefaultInstance() { + return defaultInstance; + } + + public RegionOpenInfo getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionInfo region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint32 versionOfOfflineNode = 2; + public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2; + private int versionOfOfflineNode_; + public boolean hasVersionOfOfflineNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfOfflineNode() { + return versionOfOfflineNode_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + versionOfOfflineNode_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, versionOfOfflineNode_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, versionOfOfflineNode_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)) { + 
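+ // Not a RegionOpenInfo: defer to the AbstractMessage superclass comparison,
+ // which returns false for arguments that are not messages of the same type.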
return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode()); + if (hasVersionOfOfflineNode()) { + result = result && (getVersionOfOfflineNode() + == other.getVersionOfOfflineNode()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasVersionOfOfflineNode()) { + hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER; + hash = (53 * hash) + getVersionOfOfflineNode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + versionOfOfflineNode_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.versionOfOfflineNode_ = versionOfOfflineNode_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasVersionOfOfflineNode()) { + setVersionOfOfflineNode(other.getVersionOfOfflineNode()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfOfflineNode_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionInfo region = 1; + private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint32 versionOfOfflineNode = 2; + private int versionOfOfflineNode_ ; + public boolean hasVersionOfOfflineNode() { + return ((bitField0_ & 0x00000002) == 
0x00000002); + } + public int getVersionOfOfflineNode() { + return versionOfOfflineNode_; + } + public Builder setVersionOfOfflineNode(int value) { + bitField0_ |= 0x00000002; + versionOfOfflineNode_ = value; + onChanged(); + return this; + } + public Builder clearVersionOfOfflineNode() { + bitField0_ = (bitField0_ & ~0x00000002); + versionOfOfflineNode_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionRequest.RegionOpenInfo) + } + + static { + defaultInstance = new RegionOpenInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionRequest.RegionOpenInfo) + } + + // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + public static final int OPENINFO_FIELD_NUMBER = 1; + private java.util.List openInfo_; + public java.util.List getOpenInfoList() { + return openInfo_; + } + public java.util.List + getOpenInfoOrBuilderList() { + return openInfo_; + } + public int getOpenInfoCount() { + return openInfo_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { + return openInfo_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( + int index) { + return openInfo_.get(index); + } + + private void initFields() { + openInfo_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getOpenInfoCount(); i++) { + if (!getOpenInfo(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < openInfo_.size(); i++) { + output.writeMessage(1, openInfo_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < openInfo_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, openInfo_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj; + + boolean result = true; + result = result && getOpenInfoList() + .equals(other.getOpenInfoList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getOpenInfoCount() > 0) { + hash = (37 * hash) + OPENINFO_FIELD_NUMBER; + hash = (53 * hash) + getOpenInfoList().hashCode(); + } 
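+ // Unknown fields are mixed into the hash as well, keeping hashCode() consistent
+ // with equals(), which also compares unknown fields.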
+ hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + 
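+ // Hand-added illustrative sketch (not protoc output): assembling a batch open request
+ // with the Builder defined below and round-tripping it over the wire. 'info1' and
+ // 'info2' stand for pre-built RegionOpenInfo values.
+ //
+ //   OpenRegionRequest request = OpenRegionRequest.newBuilder()
+ //       .addOpenInfo(info1)
+ //       .addOpenInfo(info2)             // repeated field: one entry per region
+ //       .build();
+ //   byte[] wire = request.toByteArray();
+ //   OpenRegionRequest roundTrip = OpenRegionRequest.parseFrom(wire);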
public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getOpenInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (openInfoBuilder_ == null) { + openInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + openInfoBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this); + int from_bitField0_ = bitField0_; + if (openInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + openInfo_ = java.util.Collections.unmodifiableList(openInfo_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.openInfo_ = openInfo_; + } else { + result.openInfo_ = openInfoBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this; + if (openInfoBuilder_ == null) { + if (!other.openInfo_.isEmpty()) { + if (openInfo_.isEmpty()) { + openInfo_ = other.openInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureOpenInfoIsMutable(); + openInfo_.addAll(other.openInfo_); + } + onChanged(); + } + } else { + if (!other.openInfo_.isEmpty()) { + if (openInfoBuilder_.isEmpty()) { + openInfoBuilder_.dispose(); + openInfoBuilder_ = null; + openInfo_ = other.openInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + openInfoBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getOpenInfoFieldBuilder() : null; + } else { + openInfoBuilder_.addAllMessages(other.openInfo_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getOpenInfoCount(); i++) { + if (!getOpenInfo(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addOpenInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; + private java.util.List openInfo_ = + java.util.Collections.emptyList(); + private void ensureOpenInfoIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + openInfo_ = new java.util.ArrayList(openInfo_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_; + + public java.util.List getOpenInfoList() { + if (openInfoBuilder_ == null) { + return java.util.Collections.unmodifiableList(openInfo_); + } else { + return openInfoBuilder_.getMessageList(); + } + } + public int getOpenInfoCount() { + if (openInfoBuilder_ == null) { + return openInfo_.size(); + } else { + return openInfoBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { + if (openInfoBuilder_ == null) { + return openInfo_.get(index); + } else { + return openInfoBuilder_.getMessage(index); + } + } + public Builder setOpenInfo( + int index, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { + if (openInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpenInfoIsMutable(); + openInfo_.set(index, value); + onChanged(); + } else { + openInfoBuilder_.setMessage(index, value); + } + return this; + } + public Builder setOpenInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { + if (openInfoBuilder_ == null) { + ensureOpenInfoIsMutable(); + openInfo_.set(index, builderForValue.build()); + onChanged(); + } else { + openInfoBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { + if (openInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpenInfoIsMutable(); + openInfo_.add(value); + onChanged(); + } else { + openInfoBuilder_.addMessage(value); + } + return this; + } + public Builder addOpenInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { + if (openInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpenInfoIsMutable(); + openInfo_.add(index, value); + onChanged(); + } else { + openInfoBuilder_.addMessage(index, value); + } + return this; + } + public Builder addOpenInfo( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { + if (openInfoBuilder_ == null) { + ensureOpenInfoIsMutable(); + openInfo_.add(builderForValue.build()); + onChanged(); + } else { + openInfoBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addOpenInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { + if (openInfoBuilder_ == null) { + ensureOpenInfoIsMutable(); + openInfo_.add(index, builderForValue.build()); + onChanged(); + } else { + openInfoBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllOpenInfo( + java.lang.Iterable values) { + if (openInfoBuilder_ == null) { + ensureOpenInfoIsMutable(); + super.addAll(values, openInfo_); + onChanged(); + } else { + openInfoBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearOpenInfo() { + if (openInfoBuilder_ == null) { + openInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + openInfoBuilder_.clear(); + } + return this; + } + public Builder removeOpenInfo(int index) { + if (openInfoBuilder_ == null) { + ensureOpenInfoIsMutable(); + openInfo_.remove(index); + onChanged(); + } else { + openInfoBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder getOpenInfoBuilder( + int index) { + return getOpenInfoFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( + int index) { + if (openInfoBuilder_ == null) { + return openInfo_.get(index); } else { + return openInfoBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getOpenInfoOrBuilderList() { + if (openInfoBuilder_ != null) { + return openInfoBuilder_.getMessageOrBuilderList(); + 
} else { + return java.util.Collections.unmodifiableList(openInfo_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder() { + return getOpenInfoFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder( + int index) { + return getOpenInfoFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); + } + public java.util.List + getOpenInfoBuilderList() { + return getOpenInfoFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> + getOpenInfoFieldBuilder() { + if (openInfoBuilder_ == null) { + openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>( + openInfo_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + openInfo_ = null; + } + return openInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionRequest) + } + + static { + defaultInstance = new OpenRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionRequest) + } + + public interface OpenRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + java.util.List getOpeningStateList(); + int getOpeningStateCount(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); + } + public static final class OpenRegionResponse extends + com.google.protobuf.GeneratedMessage + implements OpenRegionResponseOrBuilder { + // Use OpenRegionResponse.newBuilder() to construct. 
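+ // Hand-added illustrative sketch (not protoc output): inspecting the per-region
+ // opening states. 'response' stands for an assumed, already-parsed
+ // OpenRegionResponse; states presumably arrive in the order of the matching
+ // OpenRegionRequest entries.
+ //
+ //   for (RegionOpeningState state : response.getOpeningStateList()) {
+ //     if (state == RegionOpeningState.FAILED_OPENING) {
+ //       // retry or reassign the corresponding region
+ //     }
+ //   }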
+ private OpenRegionResponse(Builder builder) { + super(builder); + } + private OpenRegionResponse(boolean noInit) {} + + private static final OpenRegionResponse defaultInstance; + public static OpenRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public OpenRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + } + + public enum RegionOpeningState + implements com.google.protobuf.ProtocolMessageEnum { + OPENED(0, 0), + ALREADY_OPENED(1, 1), + FAILED_OPENING(2, 2), + ; + + public static final int OPENED_VALUE = 0; + public static final int ALREADY_OPENED_VALUE = 1; + public static final int FAILED_OPENING_VALUE = 2; + + + public final int getNumber() { return value; } + + public static RegionOpeningState valueOf(int value) { + switch (value) { + case 0: return OPENED; + case 1: return ALREADY_OPENED; + case 2: return FAILED_OPENING; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public RegionOpeningState findValueByNumber(int number) { + return RegionOpeningState.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); + } + + private static final RegionOpeningState[] VALUES = { + OPENED, ALREADY_OPENED, FAILED_OPENING, + }; + + public static RegionOpeningState valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private RegionOpeningState(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) + } + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + public static final int OPENINGSTATE_FIELD_NUMBER = 1; + private java.util.List openingState_; + public java.util.List getOpeningStateList() { + return openingState_; + } + public int getOpeningStateCount() { + return openingState_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { + return openingState_.get(index); + } + + private void initFields() { + openingState_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if 
(isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < openingState_.size(); i++) { + output.writeEnum(1, openingState_.get(i).getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < openingState_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(openingState_.get(i).getNumber()); + } + size += dataSize; + size += 1 * openingState_.size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj; + + boolean result = true; + result = result && getOpeningStateList() + .equals(other.getOpeningStateList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getOpeningStateCount() > 0) { + hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnumList(getOpeningStateList()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + java.io.InputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + openingState_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = java.util.Collections.unmodifiableList(openingState_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.openingState_ = openingState_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this; + if (!other.openingState_.isEmpty()) { + if (openingState_.isEmpty()) { + openingState_ = other.openingState_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureOpeningStateIsMutable(); + openingState_.addAll(other.openingState_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addOpeningState(value); + } + break; + } + case 10: { + int length = input.readRawVarint32(); + int oldLimit = 
input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addOpeningState(value); + } + } + input.popLimit(oldLimit); + break; + } + } + } + } + + private int bitField0_; + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + private java.util.List openingState_ = + java.util.Collections.emptyList(); + private void ensureOpeningStateIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = new java.util.ArrayList(openingState_); + bitField0_ |= 0x00000001; + } + } + public java.util.List getOpeningStateList() { + return java.util.Collections.unmodifiableList(openingState_); + } + public int getOpeningStateCount() { + return openingState_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { + return openingState_.get(index); + } + public Builder setOpeningState( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpeningStateIsMutable(); + openingState_.set(index, value); + onChanged(); + return this; + } + public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpeningStateIsMutable(); + openingState_.add(value); + onChanged(); + return this; + } + public Builder addAllOpeningState( + java.lang.Iterable values) { + ensureOpeningStateIsMutable(); + super.addAll(values, openingState_); + onChanged(); + return this; + } + public Builder clearOpeningState() { + openingState_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionResponse) + } + + static { + defaultInstance = new OpenRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionResponse) + } + + public interface CloseRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional uint32 versionOfClosingNode = 2; + boolean hasVersionOfClosingNode(); + int getVersionOfClosingNode(); + + // optional bool transitionInZK = 3 [default = true]; + boolean hasTransitionInZK(); + boolean getTransitionInZK(); + + // optional .ServerName destinationServer = 4; + boolean hasDestinationServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder(); + } + public static final class CloseRegionRequest extends + com.google.protobuf.GeneratedMessage + implements CloseRegionRequestOrBuilder { + // Use CloseRegionRequest.newBuilder() to construct. 
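+ // Hand-added illustrative sketch (not protoc output): building a close request with
+ // the Builder generated for this class. 'regionSpec' stands for an assumed, pre-built
+ // HBaseProtos.RegionSpecifier; note transitionInZK defaults to true (see initFields()
+ // below), so the setter is shown only to illustrate overriding it.
+ //
+ //   CloseRegionRequest close = CloseRegionRequest.newBuilder()
+ //       .setRegion(regionSpec)          // required .RegionSpecifier region = 1
+ //       .setVersionOfClosingNode(7)     // made-up ZK znode version
+ //       .setTransitionInZK(false)       // override the 'true' default
+ //       .build();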
+ private CloseRegionRequest(Builder builder) { + super(builder); + } + private CloseRegionRequest(boolean noInit) {} + + private static final CloseRegionRequest defaultInstance; + public static CloseRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public CloseRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint32 versionOfClosingNode = 2; + public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2; + private int versionOfClosingNode_; + public boolean hasVersionOfClosingNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfClosingNode() { + return versionOfClosingNode_; + } + + // optional bool transitionInZK = 3 [default = true]; + public static final int TRANSITIONINZK_FIELD_NUMBER = 3; + private boolean transitionInZK_; + public boolean hasTransitionInZK() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getTransitionInZK() { + return transitionInZK_; + } + + // optional .ServerName destinationServer = 4; + public static final int DESTINATIONSERVER_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_; + public boolean hasDestinationServer() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { + return destinationServer_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { + return destinationServer_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + versionOfClosingNode_ = 0; + transitionInZK_ = true; + destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasDestinationServer()) { + if (!getDestinationServer().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 
0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, versionOfClosingNode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, transitionInZK_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(4, destinationServer_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, versionOfClosingNode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, transitionInZK_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, destinationServer_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode()); + if (hasVersionOfClosingNode()) { + result = result && (getVersionOfClosingNode() + == other.getVersionOfClosingNode()); + } + result = result && (hasTransitionInZK() == other.hasTransitionInZK()); + if (hasTransitionInZK()) { + result = result && (getTransitionInZK() + == other.getTransitionInZK()); + } + result = result && (hasDestinationServer() == other.hasDestinationServer()); + if (hasDestinationServer()) { + result = result && getDestinationServer() + .equals(other.getDestinationServer()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasVersionOfClosingNode()) { + hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER; + hash = (53 * hash) + getVersionOfClosingNode(); + } + if (hasTransitionInZK()) { + hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getTransitionInZK()); + } + if (hasDestinationServer()) { + hash = (37 * hash) + DESTINATIONSERVER_FIELD_NUMBER; + hash = (53 * hash) + getDestinationServer().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + 
com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getDestinationServerFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + versionOfClosingNode_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + transitionInZK_ = true; + bitField0_ = (bitField0_ & ~0x00000004); + if (destinationServerBuilder_ == null) { + destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + destinationServerBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 
0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.versionOfClosingNode_ = versionOfClosingNode_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.transitionInZK_ = transitionInZK_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + if (destinationServerBuilder_ == null) { + result.destinationServer_ = destinationServer_; + } else { + result.destinationServer_ = destinationServerBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasVersionOfClosingNode()) { + setVersionOfClosingNode(other.getVersionOfClosingNode()); + } + if (other.hasTransitionInZK()) { + setTransitionInZK(other.getTransitionInZK()); + } + if (other.hasDestinationServer()) { + mergeDestinationServer(other.getDestinationServer()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (hasDestinationServer()) { + if (!getDestinationServer().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + transitionInZK_ = input.readBool(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasDestinationServer()) { + subBuilder.mergeFrom(getDestinationServer()); + } + input.readMessage(subBuilder, extensionRegistry); + setDestinationServer(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint32 versionOfClosingNode = 2; + private int 
versionOfClosingNode_ ; + public boolean hasVersionOfClosingNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfClosingNode() { + return versionOfClosingNode_; + } + public Builder setVersionOfClosingNode(int value) { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = value; + onChanged(); + return this; + } + public Builder clearVersionOfClosingNode() { + bitField0_ = (bitField0_ & ~0x00000002); + versionOfClosingNode_ = 0; + onChanged(); + return this; + } + + // optional bool transitionInZK = 3 [default = true]; + private boolean transitionInZK_ = true; + public boolean hasTransitionInZK() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getTransitionInZK() { + return transitionInZK_; + } + public Builder setTransitionInZK(boolean value) { + bitField0_ |= 0x00000004; + transitionInZK_ = value; + onChanged(); + return this; + } + public Builder clearTransitionInZK() { + bitField0_ = (bitField0_ & ~0x00000004); + transitionInZK_ = true; + onChanged(); + return this; + } + + // optional .ServerName destinationServer = 4; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_; + public boolean hasDestinationServer() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { + if (destinationServerBuilder_ == null) { + return destinationServer_; + } else { + return destinationServerBuilder_.getMessage(); + } + } + public Builder setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (destinationServerBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + destinationServer_ = value; + onChanged(); + } else { + destinationServerBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder setDestinationServer( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (destinationServerBuilder_ == null) { + destinationServer_ = builderForValue.build(); + onChanged(); + } else { + destinationServerBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (destinationServerBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + destinationServer_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + destinationServer_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destinationServer_).mergeFrom(value).buildPartial(); + } else { + destinationServer_ = value; + } + onChanged(); + } else { + destinationServerBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder clearDestinationServer() { + if (destinationServerBuilder_ == null) { + destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + 
destinationServerBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestinationServerBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getDestinationServerFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { + if (destinationServerBuilder_ != null) { + return destinationServerBuilder_.getMessageOrBuilder(); + } else { + return destinationServer_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getDestinationServerFieldBuilder() { + if (destinationServerBuilder_ == null) { + destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + destinationServer_, + getParentForChildren(), + isClean()); + destinationServer_ = null; + } + return destinationServerBuilder_; + } + + // @@protoc_insertion_point(builder_scope:CloseRegionRequest) + } + + static { + defaultInstance = new CloseRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CloseRegionRequest) + } + + public interface CloseRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool closed = 1; + boolean hasClosed(); + boolean getClosed(); + } + public static final class CloseRegionResponse extends + com.google.protobuf.GeneratedMessage + implements CloseRegionResponseOrBuilder { + // Use CloseRegionResponse.newBuilder() to construct. 
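+ // -------------------------------------------------------------------------
+ // Editor's sketch (illustrative only, not emitted by protoc): building and
+ // round-tripping the CloseRegionRequest defined above. Only setType/setValue
+ // and RegionSpecifierType.REGION_NAME are assumptions about RegionSpecifier's
+ // shape in hbase.proto; every other call appears in this file or is standard
+ // protobuf-java (toByteString).
+ //
+ //   import com.google.protobuf.ByteString;
+ //   import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
+ //   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
+ //
+ //   RegionSpecifier region = RegionSpecifier.newBuilder()
+ //       .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)  // assumed enum value
+ //       .setValue(ByteString.copyFromUtf8("t1,,1363909696.abc."))  // hypothetical region name
+ //       .build();
+ //   CloseRegionRequest req = CloseRegionRequest.newBuilder()
+ //       .setRegion(region)            // required; build() throws if unset
+ //       .setTransitionInZK(false)     // optional bool, defaults to true
+ //       .build();
+ //   CloseRegionRequest parsed = CloseRegionRequest.parseFrom(req.toByteString());
+ //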
+ private CloseRegionResponse(Builder builder) { + super(builder); + } + private CloseRegionResponse(boolean noInit) {} + + private static final CloseRegionResponse defaultInstance; + public static CloseRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public CloseRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool closed = 1; + public static final int CLOSED_FIELD_NUMBER = 1; + private boolean closed_; + public boolean hasClosed() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getClosed() { + return closed_; + } + + private void initFields() { + closed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClosed()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, closed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, closed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj; + + boolean result = true; + result = result && (hasClosed() == other.hasClosed()); + if (hasClosed()) { + result = result && (getClosed() + == other.getClosed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasClosed()) { + hash = (37 * hash) + CLOSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getClosed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public 
static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + closed_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.closed_ = closed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this; + if (other.hasClosed()) { + setClosed(other.getClosed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasClosed()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + closed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool closed = 1; + private boolean closed_ ; + public boolean hasClosed() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getClosed() { + return closed_; + } + public Builder setClosed(boolean value) { + bitField0_ |= 0x00000001; + closed_ = value; + onChanged(); + return this; + } + public Builder clearClosed() { + bitField0_ = (bitField0_ & ~0x00000001); + closed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CloseRegionResponse) + } + + static { + defaultInstance = new CloseRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CloseRegionResponse) + } + + public interface FlushRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional uint64 ifOlderThanTs = 2; + boolean hasIfOlderThanTs(); + long getIfOlderThanTs(); + } + public static final class FlushRegionRequest extends + com.google.protobuf.GeneratedMessage + implements FlushRegionRequestOrBuilder { + // Use FlushRegionRequest.newBuilder() to construct. 
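+ // -------------------------------------------------------------------------
+ // Editor's sketch (illustrative only, not emitted by protoc): the flush RPC
+ // messages follow the same pattern. ifOlderThanTs is a proto uint64 surfaced
+ // as a Java long, and the parseDelimitedFrom helpers in these classes return
+ // null at end-of-stream instead of throwing, so a reader loop looks like this
+ // ("in" is a hypothetical java.io.InputStream, "region" as in the sketch above):
+ //
+ //   AdminProtos.FlushRegionRequest flush = AdminProtos.FlushRegionRequest.newBuilder()
+ //       .setRegion(region)                                 // required RegionSpecifier
+ //       .setIfOlderThanTs(System.currentTimeMillis() - 60000L)  // optional uint64
+ //       .build();
+ //
+ //   AdminProtos.FlushRegionResponse resp;
+ //   while ((resp = AdminProtos.FlushRegionResponse.parseDelimitedFrom(in)) != null) {
+ //     if (resp.hasFlushed() && resp.getFlushed()) { /* region was flushed */ }
+ //   }
+ //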
+ private FlushRegionRequest(Builder builder) { + super(builder); + } + private FlushRegionRequest(boolean noInit) {} + + private static final FlushRegionRequest defaultInstance; + public static FlushRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public FlushRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint64 ifOlderThanTs = 2; + public static final int IFOLDERTHANTS_FIELD_NUMBER = 2; + private long ifOlderThanTs_; + public boolean hasIfOlderThanTs() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getIfOlderThanTs() { + return ifOlderThanTs_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + ifOlderThanTs_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, ifOlderThanTs_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, ifOlderThanTs_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs()); + if (hasIfOlderThanTs()) { + result = result && (getIfOlderThanTs() + == other.getIfOlderThanTs()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasIfOlderThanTs()) { + hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getIfOlderThanTs()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + 
com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + ifOlderThanTs_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + 
throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ifOlderThanTs_ = ifOlderThanTs_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasIfOlderThanTs()) { + setIfOlderThanTs(other.getIfOlderThanTs()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 
0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint64 ifOlderThanTs = 2; + private long ifOlderThanTs_ ; + public boolean hasIfOlderThanTs() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getIfOlderThanTs() { + return ifOlderThanTs_; + } + public Builder setIfOlderThanTs(long value) { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = value; + onChanged(); + return this; + } + public Builder clearIfOlderThanTs() { + bitField0_ = (bitField0_ & ~0x00000002); + ifOlderThanTs_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FlushRegionRequest) + } + + static { + defaultInstance = new 
FlushRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FlushRegionRequest) + } + + public interface FlushRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lastFlushTime = 1; + boolean hasLastFlushTime(); + long getLastFlushTime(); + + // optional bool flushed = 2; + boolean hasFlushed(); + boolean getFlushed(); + } + public static final class FlushRegionResponse extends + com.google.protobuf.GeneratedMessage + implements FlushRegionResponseOrBuilder { + // Use FlushRegionResponse.newBuilder() to construct. + private FlushRegionResponse(Builder builder) { + super(builder); + } + private FlushRegionResponse(boolean noInit) {} + + private static final FlushRegionResponse defaultInstance; + public static FlushRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public FlushRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lastFlushTime = 1; + public static final int LASTFLUSHTIME_FIELD_NUMBER = 1; + private long lastFlushTime_; + public boolean hasLastFlushTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushTime() { + return lastFlushTime_; + } + + // optional bool flushed = 2; + public static final int FLUSHED_FIELD_NUMBER = 2; + private boolean flushed_; + public boolean hasFlushed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getFlushed() { + return flushed_; + } + + private void initFields() { + lastFlushTime_ = 0L; + flushed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLastFlushTime()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lastFlushTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, flushed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lastFlushTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, flushed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { 
+ return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj; + + boolean result = true; + result = result && (hasLastFlushTime() == other.hasLastFlushTime()); + if (hasLastFlushTime()) { + result = result && (getLastFlushTime() + == other.getLastFlushTime()); + } + result = result && (hasFlushed() == other.hasFlushed()); + if (hasFlushed()) { + result = result && (getFlushed() + == other.getFlushed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLastFlushTime()) { + hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLastFlushTime()); + } + if (hasFlushed()) { + hash = (37 * hash) + FLUSHED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getFlushed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return 
builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lastFlushTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + flushed_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lastFlushTime_ = lastFlushTime_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.flushed_ = flushed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this; + if (other.hasLastFlushTime()) { + setLastFlushTime(other.getLastFlushTime()); + } + if (other.hasFlushed()) { + setFlushed(other.getFlushed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLastFlushTime()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lastFlushTime_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + flushed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lastFlushTime = 1; + private long lastFlushTime_ ; + public boolean hasLastFlushTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushTime() { + return lastFlushTime_; + } + public Builder setLastFlushTime(long value) { + bitField0_ |= 0x00000001; + lastFlushTime_ = value; + onChanged(); + return this; + } + public Builder clearLastFlushTime() { + bitField0_ = (bitField0_ & ~0x00000001); + lastFlushTime_ = 0L; + onChanged(); + return this; + } + + // optional bool flushed = 2; + private boolean flushed_ ; + public boolean hasFlushed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getFlushed() { + return flushed_; + } + public Builder setFlushed(boolean value) { + bitField0_ |= 0x00000002; + flushed_ = value; + onChanged(); + return this; + } + public Builder clearFlushed() { + bitField0_ = (bitField0_ & ~0x00000002); + flushed_ = false; + onChanged(); + return this; + } + + // 
@@protoc_insertion_point(builder_scope:FlushRegionResponse) + } + + static { + defaultInstance = new FlushRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FlushRegionResponse) + } + + public interface SplitRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bytes splitPoint = 2; + boolean hasSplitPoint(); + com.google.protobuf.ByteString getSplitPoint(); + } + public static final class SplitRegionRequest extends + com.google.protobuf.GeneratedMessage + implements SplitRegionRequestOrBuilder { + // Use SplitRegionRequest.newBuilder() to construct. + private SplitRegionRequest(Builder builder) { + super(builder); + } + private SplitRegionRequest(boolean noInit) {} + + private static final SplitRegionRequest defaultInstance; + public static SplitRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public SplitRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bytes splitPoint = 2; + public static final int SPLITPOINT_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString splitPoint_; + public boolean hasSplitPoint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSplitPoint() { + return splitPoint_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + splitPoint_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, splitPoint_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int 
getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, splitPoint_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasSplitPoint() == other.hasSplitPoint()); + if (hasSplitPoint()) { + result = result && getSplitPoint() + .equals(other.getSplitPoint()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasSplitPoint()) { + hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER; + hash = (53 * hash) + getSplitPoint().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + splitPoint_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.splitPoint_ = splitPoint_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasSplitPoint()) { + setSplitPoint(other.getSplitPoint()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + splitPoint_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bytes splitPoint = 2; + private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSplitPoint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSplitPoint() { + return splitPoint_; + } + public Builder setSplitPoint(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + splitPoint_ = value; + onChanged(); + return this; + } + public Builder clearSplitPoint() { + bitField0_ = (bitField0_ & ~0x00000002); + splitPoint_ = getDefaultInstance().getSplitPoint(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SplitRegionRequest) + } + + static { + defaultInstance = new SplitRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitRegionRequest) + } + + public interface SplitRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class SplitRegionResponse extends + com.google.protobuf.GeneratedMessage + implements SplitRegionResponseOrBuilder { + // Use SplitRegionResponse.newBuilder() to construct. + private SplitRegionResponse(Builder builder) { + super(builder); + } + private SplitRegionResponse(boolean noInit) {} + + private static final SplitRegionResponse defaultInstance; + public static SplitRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public SplitRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)) { + return super.equals(obj); + } + 
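+ // Editor's note: past the instanceof guard above, both objects are
+ // SplitRegionResponse instances; since the message declares no fields,
+ // equality reduces to comparing the two messages' unknown-field sets.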
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) { + if 
(other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:SplitRegionResponse) + } + + static { + defaultInstance = new SplitRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitRegionResponse) + } + + public interface CompactRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bool major = 2; + boolean hasMajor(); + boolean getMajor(); + + // optional bytes family = 3; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + } + public static final class CompactRegionRequest extends + com.google.protobuf.GeneratedMessage + implements CompactRegionRequestOrBuilder { + // Use CompactRegionRequest.newBuilder() to construct. 
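+ // Usage sketch (editor's illustration, not protoc output; `regionSpecifier`
+ // is a hypothetical, already-built RegionSpecifier):
+ //   CompactRegionRequest request = CompactRegionRequest.newBuilder()
+ //       .setRegion(regionSpecifier) // required; build() throws if unset
+ //       .setMajor(true)             // optional, defaults to false
+ //       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
+ //       .build();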
+ private CompactRegionRequest(Builder builder) { + super(builder); + } + private CompactRegionRequest(boolean noInit) {} + + private static final CompactRegionRequest defaultInstance; + public static CompactRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public CompactRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bool major = 2; + public static final int MAJOR_FIELD_NUMBER = 2; + private boolean major_; + public boolean hasMajor() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMajor() { + return major_; + } + + // optional bytes family = 3; + public static final int FAMILY_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + major_ = false; + family_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, major_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, family_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, major_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, family_); + } + size += getUnknownFields().getSerializedSize(); + 
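+ // Editor's note: the computed size is memoized below; writeTo() calls
+ // getSerializedSize() first, so repeated serialization reuses this value.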
memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasMajor() == other.hasMajor()); + if (hasMajor()) { + result = result && (getMajor() + == other.getMajor()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasMajor()) { + hash = (37 * hash) + MAJOR_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMajor()); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + major_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDescriptor(); + } + 
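+ // Editor's note on the three build paths that follow: build() enforces the
+ // required `region` field and throws UninitializedMessageException when it
+ // is missing; buildPartial() performs no such check; buildParsed() is the
+ // parse-time variant that rethrows the failure as
+ // InvalidProtocolBufferException.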
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.major_ = major_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.family_ = family_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasMajor()) { + setMajor(other.getMajor()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + major_ = input.readBool(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + family_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() 
{ + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bool major = 2; + private boolean major_ ; + public boolean hasMajor() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMajor() { + return major_; + } + public Builder setMajor(boolean value) { + bitField0_ |= 0x00000002; + major_ = value; + onChanged(); + return this; + } + public Builder clearMajor() { + bitField0_ = (bitField0_ & ~0x00000002); + major_ = false; + onChanged(); + return this; + } + + // optional bytes family = 3; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000004); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CompactRegionRequest) + } + + static { + defaultInstance = new CompactRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactRegionRequest) + } + + public interface CompactRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class CompactRegionResponse extends + com.google.protobuf.GeneratedMessage + implements CompactRegionResponseOrBuilder { + // Use CompactRegionResponse.newBuilder() to construct. 
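+ // Editor's note: CompactRegionResponse declares no fields; it exists so the
+ // compact-region RPC (presumably) has a typed, if empty, reply. Round-trip
+ // sketch using only members of this class:
+ //   byte[] wire = CompactRegionResponse.getDefaultInstance().toByteArray();
+ //   CompactRegionResponse reply = CompactRegionResponse.parseFrom(wire);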
+ private CompactRegionResponse(Builder builder) { + super(builder); + } + private CompactRegionResponse(boolean noInit) {} + + private static final CompactRegionResponse defaultInstance; + public static CompactRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public CompactRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + 
super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CompactRegionResponse) + } + + static { + defaultInstance = new CompactRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactRegionResponse) + } + + public interface UUIDOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 leastSigBits = 1; + boolean hasLeastSigBits(); + long getLeastSigBits(); + + // required uint64 mostSigBits = 2; + boolean hasMostSigBits(); + long getMostSigBits(); + } + public static final class UUID extends + com.google.protobuf.GeneratedMessage + implements 
UUIDOrBuilder { + // Use UUID.newBuilder() to construct. + private UUID(Builder builder) { + super(builder); + } + private UUID(boolean noInit) {} + + private static final UUID defaultInstance; + public static UUID getDefaultInstance() { + return defaultInstance; + } + + public UUID getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 leastSigBits = 1; + public static final int LEASTSIGBITS_FIELD_NUMBER = 1; + private long leastSigBits_; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + + // required uint64 mostSigBits = 2; + public static final int MOSTSIGBITS_FIELD_NUMBER = 2; + private long mostSigBits_; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + + private void initFields() { + leastSigBits_ = 0L; + mostSigBits_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLeastSigBits()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMostSigBits()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, mostSigBits_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, mostSigBits_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj; + + boolean result = true; + result = result && (hasLeastSigBits() == other.hasLeastSigBits()); + if (hasLeastSigBits()) { + result = result && (getLeastSigBits() + == other.getLeastSigBits()); + } + result = result && (hasMostSigBits() == other.hasMostSigBits()); + if 
(hasMostSigBits()) { + result = result && (getMostSigBits() + == other.getMostSigBits()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLeastSigBits()) { + hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLeastSigBits()); + } + if (hasMostSigBits()) { + hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getMostSigBits()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + leastSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + mostSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.leastSigBits_ = leastSigBits_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mostSigBits_ = mostSigBits_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) { + 
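+ // (Note: the untyped overload dispatches to the typed mergeFrom that follows when
+ // the runtime type matches; otherwise the superclass merges reflectively, field by
+ // field.)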
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this; + if (other.hasLeastSigBits()) { + setLeastSigBits(other.getLeastSigBits()); + } + if (other.hasMostSigBits()) { + setMostSigBits(other.getMostSigBits()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLeastSigBits()) { + + return false; + } + if (!hasMostSigBits()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + leastSigBits_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + mostSigBits_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 leastSigBits = 1; + private long leastSigBits_ ; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + public Builder setLeastSigBits(long value) { + bitField0_ |= 0x00000001; + leastSigBits_ = value; + onChanged(); + return this; + } + public Builder clearLeastSigBits() { + bitField0_ = (bitField0_ & ~0x00000001); + leastSigBits_ = 0L; + onChanged(); + return this; + } + + // required uint64 mostSigBits = 2; + private long mostSigBits_ ; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + public Builder setMostSigBits(long value) { + bitField0_ |= 0x00000002; + mostSigBits_ = value; + onChanged(); + return this; + } + public Builder clearMostSigBits() { + bitField0_ = (bitField0_ & ~0x00000002); + mostSigBits_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UUID) + } + + static { + defaultInstance = new UUID(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UUID) + } + + public interface WALEntryOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .WALEntry.WALKey key = 1; + boolean hasKey(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder(); + + // required .WALEntry.WALEdit edit = 2; + boolean hasEdit(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); + } + public static final class WALEntry extends + com.google.protobuf.GeneratedMessage + implements WALEntryOrBuilder { + // Use WALEntry.newBuilder() to construct. 
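+ //
+ // (Illustrative sketch, not protoc output: the UUID message closed above carries a
+ // java.util.UUID as two required uint64 fields, so a typical conversion is
+ //   java.util.UUID u = java.util.UUID.randomUUID();
+ //   AdminProtos.UUID pb = AdminProtos.UUID.newBuilder()
+ //       .setMostSigBits(u.getMostSignificantBits())
+ //       .setLeastSigBits(u.getLeastSignificantBits())
+ //       .build();
+ // build() throws an UninitializedMessageException if either required field is unset,
+ // per the isInitialized() check above.)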
+ private WALEntry(Builder builder) { + super(builder); + } + private WALEntry(boolean noInit) {} + + private static final WALEntry defaultInstance; + public static WALEntry getDefaultInstance() { + return defaultInstance; + } + + public WALEntry getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; + } + + public interface WALKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes encodedRegionName = 1; + boolean hasEncodedRegionName(); + com.google.protobuf.ByteString getEncodedRegionName(); + + // required bytes tableName = 2; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required uint64 logSequenceNumber = 3; + boolean hasLogSequenceNumber(); + long getLogSequenceNumber(); + + // required uint64 writeTime = 4; + boolean hasWriteTime(); + long getWriteTime(); + + // optional .UUID clusterId = 5; + boolean hasClusterId(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); + } + public static final class WALKey extends + com.google.protobuf.GeneratedMessage + implements WALKeyOrBuilder { + // Use WALKey.newBuilder() to construct. + private WALKey(Builder builder) { + super(builder); + } + private WALKey(boolean noInit) {} + + private static final WALKey defaultInstance; + public static WALKey getDefaultInstance() { + return defaultInstance; + } + + public WALKey getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + } + + private int bitField0_; + // required bytes encodedRegionName = 1; + public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString encodedRegionName_; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required uint64 logSequenceNumber = 3; + public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; + private long logSequenceNumber_; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + + // required uint64 writeTime = 4; + public static final int WRITETIME_FIELD_NUMBER = 4; + private long writeTime_; + public boolean hasWriteTime() { + return 
((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + + // optional .UUID clusterId = 5; + public static final int CLUSTERID_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { + return clusterId_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + return clusterId_; + } + + private void initFields() { + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + tableName_ = com.google.protobuf.ByteString.EMPTY; + logSequenceNumber_ = 0L; + writeTime_ = 0L; + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEncodedRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLogSequenceNumber()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasWriteTime()) { + memoizedIsInitialized = 0; + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, clusterId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, clusterId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj; + + boolean result = true; + result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); + if (hasEncodedRegionName()) { + result = result && getEncodedRegionName() + .equals(other.getEncodedRegionName()); + } + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); + if (hasLogSequenceNumber()) { + result = result && (getLogSequenceNumber() + == other.getLogSequenceNumber()); + } + result = result && (hasWriteTime() == other.hasWriteTime()); + if (hasWriteTime()) { + result = result && (getWriteTime() + == other.getWriteTime()); + } + result = result && (hasClusterId() == other.hasClusterId()); + if (hasClusterId()) { + result = result && getClusterId() + .equals(other.getClusterId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEncodedRegionName()) { + hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getEncodedRegionName().hashCode(); + } + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasLogSequenceNumber()) { + hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLogSequenceNumber()); + } + if (hasWriteTime()) { + hash = (37 * hash) + WRITETIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getWriteTime()); + } + if (hasClusterId()) { + hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; + hash = (53 * hash) + getClusterId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getClusterIdFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + logSequenceNumber_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + writeTime_ = 0L; + bitField0_ = (bitField0_ & 
~0x00000008); + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.encodedRegionName_ = encodedRegionName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.logSequenceNumber_ = logSequenceNumber_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.writeTime_ = writeTime_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (clusterIdBuilder_ == null) { + result.clusterId_ = clusterId_; + } else { + result.clusterId_ = clusterIdBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; + if (other.hasEncodedRegionName()) { + setEncodedRegionName(other.getEncodedRegionName()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasLogSequenceNumber()) { + setLogSequenceNumber(other.getLogSequenceNumber()); + } + if (other.hasWriteTime()) { + setWriteTime(other.getWriteTime()); + } + if (other.hasClusterId()) { + mergeClusterId(other.getClusterId()); + } + 
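+ // (Note: scalar and bytes fields are copied through their setters above, while the
+ // message-typed clusterId is merged recursively via mergeClusterId(), matching
+ // protobuf merge semantics for singular message fields.)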
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEncodedRegionName()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + if (!hasLogSequenceNumber()) { + + return false; + } + if (!hasWriteTime()) { + + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + encodedRegionName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + logSequenceNumber_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + writeTime_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(); + if (hasClusterId()) { + subBuilder.mergeFrom(getClusterId()); + } + input.readMessage(subBuilder, extensionRegistry); + setClusterId(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes encodedRegionName = 1; + private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + encodedRegionName_ = value; + onChanged(); + return this; + } + public Builder clearEncodedRegionName() { + bitField0_ = (bitField0_ & ~0x00000001); + encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); + onChanged(); + return this; + } + + // required bytes tableName = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required uint64 logSequenceNumber = 3; + private long logSequenceNumber_ ; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + public Builder 
setLogSequenceNumber(long value) { + bitField0_ |= 0x00000004; + logSequenceNumber_ = value; + onChanged(); + return this; + } + public Builder clearLogSequenceNumber() { + bitField0_ = (bitField0_ & ~0x00000004); + logSequenceNumber_ = 0L; + onChanged(); + return this; + } + + // required uint64 writeTime = 4; + private long writeTime_ ; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + public Builder setWriteTime(long value) { + bitField0_ |= 0x00000008; + writeTime_ = value; + onChanged(); + return this; + } + public Builder clearWriteTime() { + bitField0_ = (bitField0_ & ~0x00000008); + writeTime_ = 0L; + onChanged(); + return this; + } + + // optional .UUID clusterId = 5; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { + if (clusterIdBuilder_ == null) { + return clusterId_; + } else { + return clusterIdBuilder_.getMessage(); + } + } + public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + clusterId_ = value; + onChanged(); + } else { + clusterIdBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setClusterId( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) { + if (clusterIdBuilder_ == null) { + clusterId_ = builderForValue.build(); + onChanged(); + } else { + clusterIdBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) { + clusterId_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); + } else { + clusterId_ = value; + } + onChanged(); + } else { + clusterIdBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearClusterId() { + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + onChanged(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getClusterIdFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + if (clusterIdBuilder_ != null) { + return clusterIdBuilder_.getMessageOrBuilder(); + } else { + return clusterId_; + } + } + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> + getClusterIdFieldBuilder() { + if (clusterIdBuilder_ == null) { + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>( + clusterId_, + getParentForChildren(), + isClean()); + clusterId_ = null; + } + return clusterIdBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) + } + + static { + defaultInstance = new WALKey(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALKey) + } + + public interface WALEditOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes keyValueBytes = 1; + java.util.List getKeyValueBytesList(); + int getKeyValueBytesCount(); + com.google.protobuf.ByteString getKeyValueBytes(int index); + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + java.util.List + getFamilyScopeList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); + int getFamilyScopeCount(); + java.util.List + getFamilyScopeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index); + } + public static final class WALEdit extends + com.google.protobuf.GeneratedMessage + implements WALEditOrBuilder { + // Use WALEdit.newBuilder() to construct. + private WALEdit(Builder builder) { + super(builder); + } + private WALEdit(boolean noInit) {} + + private static final WALEdit defaultInstance; + public static WALEdit getDefaultInstance() { + return defaultInstance; + } + + public WALEdit getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + } + + public enum ScopeType + implements com.google.protobuf.ProtocolMessageEnum { + REPLICATION_SCOPE_LOCAL(0, 0), + REPLICATION_SCOPE_GLOBAL(1, 1), + ; + + public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; + public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; + + + public final int getNumber() { return value; } + + public static ScopeType valueOf(int value) { + switch (value) { + case 0: return REPLICATION_SCOPE_LOCAL; + case 1: return REPLICATION_SCOPE_GLOBAL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ScopeType findValueByNumber(int number) { + return ScopeType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return 
getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); + } + + private static final ScopeType[] VALUES = { + REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, + }; + + public static ScopeType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ScopeType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) + } + + public interface FamilyScopeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + boolean hasScopeType(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); + } + public static final class FamilyScope extends + com.google.protobuf.GeneratedMessage + implements FamilyScopeOrBuilder { + // Use FamilyScope.newBuilder() to construct. + private FamilyScope(Builder builder) { + super(builder); + } + private FamilyScope(boolean noInit) {} + + private static final FamilyScope defaultInstance; + public static FamilyScope getDefaultInstance() { + return defaultInstance; + } + + public FamilyScope getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + public static final int SCOPETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + return scopeType_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasScopeType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void 
writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, scopeType_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, scopeType_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasScopeType() == other.hasScopeType()); + if (hasScopeType()) { + result = result && + (getScopeType() == other.getScopeType()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasScopeType()) { + hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getScopeType()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new
Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.scopeType_ = scopeType_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasScopeType()) { + setScopeType(other.getScopeType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasScopeType()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + 
this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + scopeType_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + return scopeType_; + } + public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + scopeType_ = value; + onChanged(); + return this; + } + public Builder clearScopeType() { + bitField0_ = (bitField0_ & ~0x00000002); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope) + } + + static { + defaultInstance = new FamilyScope(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) + } + + // repeated bytes keyValueBytes = 1; + public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; + private java.util.List<com.google.protobuf.ByteString> keyValueBytes_; + public java.util.List<com.google.protobuf.ByteString> + getKeyValueBytesList() { + return keyValueBytes_; + } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); + } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); + } + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + public static final int FAMILYSCOPE_FIELD_NUMBER = 2; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> familyScope_; + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> getFamilyScopeList() { + return familyScope_; + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> + getFamilyScopeOrBuilderList() { + return familyScope_; + } + public int getFamilyScopeCount() { + return
familyScope_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + return familyScope_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index) { + return familyScope_.get(index); + } + + private void initFields() { + keyValueBytes_ = java.util.Collections.emptyList();; + familyScope_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getFamilyScopeCount(); i++) { + if (!getFamilyScope(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < keyValueBytes_.size(); i++) { + output.writeBytes(1, keyValueBytes_.get(i)); + } + for (int i = 0; i < familyScope_.size(); i++) { + output.writeMessage(2, familyScope_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < keyValueBytes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValueBytes_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueBytesList().size(); + } + for (int i = 0; i < familyScope_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, familyScope_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj; + + boolean result = true; + result = result && getKeyValueBytesList() + .equals(other.getKeyValueBytesList()); + result = result && getFamilyScopeList() + .equals(other.getFamilyScopeList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getKeyValueBytesCount() > 0) { + hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; + hash = (53 * hash) + getKeyValueBytesList().hashCode(); + } + if (getFamilyScopeCount() > 0) { + hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER; + hash = (53 * hash) + getFamilyScopeList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFamilyScopeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + if (familyScopeBuilder_ == null) { + familyScope_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + familyScopeBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.keyValueBytes_ = keyValueBytes_; + if (familyScopeBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = java.util.Collections.unmodifiableList(familyScope_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.familyScope_ = familyScope_; + } else { + result.familyScope_ = familyScopeBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; + if (!other.keyValueBytes_.isEmpty()) { + if (keyValueBytes_.isEmpty()) { + keyValueBytes_ = other.keyValueBytes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.addAll(other.keyValueBytes_); + } + onChanged(); + } + if (familyScopeBuilder_ == null) { + if (!other.familyScope_.isEmpty()) { + if (familyScope_.isEmpty()) { + familyScope_ = other.familyScope_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyScopeIsMutable(); + familyScope_.addAll(other.familyScope_); + } + onChanged(); + } + } else { + if (!other.familyScope_.isEmpty()) { + if (familyScopeBuilder_.isEmpty()) { + familyScopeBuilder_.dispose(); + familyScopeBuilder_ = null; + familyScope_ = other.familyScope_; + bitField0_ = (bitField0_ & ~0x00000002); + familyScopeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyScopeFieldBuilder() : null; + } else { + familyScopeBuilder_.addAllMessages(other.familyScope_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getFamilyScopeCount(); i++) { + if (!getFamilyScope(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(input.readBytes()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyScope(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes keyValueBytes = 1; + private java.util.List<com.google.protobuf.ByteString> keyValueBytes_ = java.util.Collections.emptyList(); + private void ensureKeyValueBytesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = new java.util.ArrayList<com.google.protobuf.ByteString>(keyValueBytes_); + bitField0_ |= 0x00000001; + } + } + public java.util.List<com.google.protobuf.ByteString> + getKeyValueBytesList() { + return java.util.Collections.unmodifiableList(keyValueBytes_); + } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); + } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); + } + public Builder setKeyValueBytes( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.set(index, value); + onChanged(); + return this; + } + public Builder addKeyValueBytes(com.google.protobuf.ByteString value) {
if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(value); + onChanged(); + return this; + } + public Builder addAllKeyValueBytes( + java.lang.Iterable<? extends com.google.protobuf.ByteString> values) { + ensureKeyValueBytesIsMutable(); + super.addAll(values, keyValueBytes_); + onChanged(); + return this; + } + public Builder clearKeyValueBytes() { + keyValueBytes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> familyScope_ = + java.util.Collections.emptyList(); + private void ensureFamilyScopeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope>(familyScope_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> getFamilyScopeList() { + if (familyScopeBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyScope_); + } else { + return familyScopeBuilder_.getMessageList(); + } + } + public int getFamilyScopeCount() { + if (familyScopeBuilder_ == null) { + return familyScope_.size(); + } else { + return familyScopeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + if (familyScopeBuilder_ == null) { + return familyScope_.get(index); + } else { + return familyScopeBuilder_.getMessage(index); + } + } + public Builder setFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.set(index, value); + onChanged(); + } else { + familyScopeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.set(index, builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.add(value); + onChanged(); + } else { + familyScopeBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.add(index, value); + onChanged(); + } else { + familyScopeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFamilyScope( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if
(familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.add(builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.add(index, builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyScope( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> values) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + super.addAll(values, familyScope_); + onChanged(); + } else { + familyScopeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyScope() { + if (familyScopeBuilder_ == null) { + familyScope_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + familyScopeBuilder_.clear(); + } + return this; + } + public Builder removeFamilyScope(int index) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.remove(index); + onChanged(); + } else { + familyScopeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( + int index) { + return getFamilyScopeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index) { + if (familyScopeBuilder_ == null) { + return familyScope_.get(index); + } else { + return familyScopeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> + getFamilyScopeOrBuilderList() { + if (familyScopeBuilder_ != null) { + return familyScopeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyScope_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { + return getFamilyScopeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( + int index) { + return getFamilyScopeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder> + getFamilyScopeBuilderList() { + return getFamilyScopeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> + getFamilyScopeFieldBuilder() { + if (familyScopeBuilder_ == null) { + familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( + familyScope_,
+ ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + familyScope_ = null; + } + return familyScopeBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) + } + + static { + defaultInstance = new WALEdit(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) + } + + private int bitField0_; + // required .WALEntry.WALKey key = 1; + public static final int KEY_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_; + public boolean hasKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { + return key_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { + return key_; + } + + // required .WALEntry.WALEdit edit = 2; + public static final int EDIT_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_; + public boolean hasEdit() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { + return edit_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + return edit_; + } + + private void initFields() { + key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasKey()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasEdit()) { + memoizedIsInitialized = 0; + return false; + } + if (!getKey().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getEdit().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, key_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, edit_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, key_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, edit_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj; + + boolean result = true; + result = result && (hasKey() == other.hasKey()); + if (hasKey()) { + result = result && getKey() + .equals(other.getKey()); + } + result = result && (hasEdit() == other.hasEdit()); + if (hasEdit()) { + result = result && getEdit() + .equals(other.getEdit()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasKey()) { + hash = (37 * hash) + KEY_FIELD_NUMBER; + hash = (53 * hash) + getKey().hashCode(); + } + if (hasEdit()) { + hash = (37 * hash) + EDIT_FIELD_NUMBER; + hash = (53 * hash) + getEdit().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry 
parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getKeyFieldBuilder(); + getEditFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (keyBuilder_ == null) { + key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + } else { + keyBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } else { + editBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (keyBuilder_ == null) { + result.key_ = key_; + } else { + result.key_ = keyBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (editBuilder_ == null) { + result.edit_ = edit_; + } else { + result.edit_ = editBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this; + if (other.hasKey()) { + mergeKey(other.getKey()); + } + if (other.hasEdit()) { + mergeEdit(other.getEdit()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasKey()) { + + return false; + } + if (!hasEdit()) { + + return false; + } + if (!getKey().isInitialized()) { + + return false; + } + if (!getEdit().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(); + if (hasKey()) { + subBuilder.mergeFrom(getKey()); + } + input.readMessage(subBuilder, extensionRegistry); + setKey(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(); + if (hasEdit()) { + subBuilder.mergeFrom(getEdit()); + } + input.readMessage(subBuilder, extensionRegistry); + setEdit(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .WALEntry.WALKey key = 1; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> keyBuilder_; + public boolean hasKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { + if (keyBuilder_ == null) { + return key_; + } else { + return keyBuilder_.getMessage(); + } + } + public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { + if (keyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + key_ = value; + onChanged(); + } else { + keyBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setKey( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) { + if (keyBuilder_ == null) { + key_ = builderForValue.build(); + onChanged(); + } else { + keyBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { + if (keyBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + key_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) { + key_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(key_).mergeFrom(value).buildPartial(); + } else { + key_ = value; + } + onChanged(); + } else { + keyBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearKey() { + if (keyBuilder_ == null) { + key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + onChanged(); + } else { + keyBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getKeyBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getKeyFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { + if (keyBuilder_ != null) { + return keyBuilder_.getMessageOrBuilder(); + } else { + return key_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> + getKeyFieldBuilder() { + if (keyBuilder_ == null) { + keyBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>( + key_, + getParentForChildren(), + isClean()); + key_ = null; + } + return keyBuilder_; + } + + // required .WALEntry.WALEdit edit = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; + public boolean hasEdit() { + return 
((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { + if (editBuilder_ == null) { + return edit_; + } else { + return editBuilder_.getMessage(); + } + } + public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { + if (editBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + edit_ = value; + onChanged(); + } else { + editBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setEdit( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) { + if (editBuilder_ == null) { + edit_ = builderForValue.build(); + onChanged(); + } else { + editBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { + if (editBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + edit_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) { + edit_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); + } else { + edit_ = value; + } + onChanged(); + } else { + editBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearEdit() { + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + onChanged(); + } else { + editBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getEditFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + if (editBuilder_ != null) { + return editBuilder_.getMessageOrBuilder(); + } else { + return edit_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> + getEditFieldBuilder() { + if (editBuilder_ == null) { + editBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>( + edit_, + getParentForChildren(), + isClean()); + edit_ = null; + } + return editBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry) + } + + static { + defaultInstance = new WALEntry(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry) + } + + public interface ReplicateWALEntryRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .WALEntry entry = 1; + java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> + getEntryList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index); + int getEntryCount(); + java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> + getEntryOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( + int index); + } +
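// Usage sketch (illustrative, not compiler-generated): assembling the
// ReplicateWALEntryRequest defined below from the WALEntry types above.
// Type and builder-method names follow the generated pattern in this file
// (addEntry is the conventional protoc accessor for the repeated entry
// field); `serializedKeyValue` is a hypothetical byte[] holding an encoded
// KeyValue, and the required WALKey fields are left to the caller, so
// buildPartial() is used where required fields may still be unset.
//
//   WALEntry.WALEdit.FamilyScope scope = WALEntry.WALEdit.FamilyScope.newBuilder()
//       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
//       .setScopeType(WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_GLOBAL)
//       .build();
//   WALEntry.WALEdit edit = WALEntry.WALEdit.newBuilder()
//       .addKeyValueBytes(com.google.protobuf.ByteString.copyFrom(serializedKeyValue))
//       .addFamilyScope(scope)
//       .build();
//   WALEntry entry = WALEntry.newBuilder()
//       .setKey(WALEntry.WALKey.newBuilder().buildPartial()) // populate WALKey fields here
//       .setEdit(edit)
//       .buildPartial();
//   ReplicateWALEntryRequest request = ReplicateWALEntryRequest.newBuilder()
//       .addEntry(entry)
//       .buildPartial();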
public static final class ReplicateWALEntryRequest extends + com.google.protobuf.GeneratedMessage + implements ReplicateWALEntryRequestOrBuilder { + // Use ReplicateWALEntryRequest.newBuilder() to construct. + private ReplicateWALEntryRequest(Builder builder) { + super(builder); + } + private ReplicateWALEntryRequest(boolean noInit) {} + + private static final ReplicateWALEntryRequest defaultInstance; + public static ReplicateWALEntryRequest getDefaultInstance() { + return defaultInstance; + } + + public ReplicateWALEntryRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + } + + // repeated .WALEntry entry = 1; + public static final int ENTRY_FIELD_NUMBER = 1; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> entry_; + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> getEntryList() { + return entry_; + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> + getEntryOrBuilderList() { + return entry_; + } + public int getEntryCount() { + return entry_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) { + return entry_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( + int index) { + return entry_.get(index); + } + + private void initFields() { + entry_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getEntryCount(); i++) { + if (!getEntry(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < entry_.size(); i++) { + output.writeMessage(1, entry_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < entry_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, entry_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj; + + boolean result = true; + result = result && getEntryList() + .equals(other.getEntryList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result;
+ } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getEntryCount() > 0) { + hash = (37 * hash) + ENTRY_FIELD_NUMBER; + hash = (53 * hash) + getEntryList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getEntryFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (entryBuilder_ == null) { + entry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + entryBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this); + int from_bitField0_ = bitField0_; + if (entryBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + entry_ = java.util.Collections.unmodifiableList(entry_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.entry_ = entry_; + } else { + 
result.entry_ = entryBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; + if (entryBuilder_ == null) { + if (!other.entry_.isEmpty()) { + if (entry_.isEmpty()) { + entry_ = other.entry_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureEntryIsMutable(); + entry_.addAll(other.entry_); + } + onChanged(); + } + } else { + if (!other.entry_.isEmpty()) { + if (entryBuilder_.isEmpty()) { + entryBuilder_.dispose(); + entryBuilder_ = null; + entry_ = other.entry_; + bitField0_ = (bitField0_ & ~0x00000001); + entryBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getEntryFieldBuilder() : null; + } else { + entryBuilder_.addAllMessages(other.entry_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getEntryCount(); i++) { + if (!getEntry(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addEntry(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .WALEntry entry = 1; + private java.util.List entry_ = + java.util.Collections.emptyList(); + private void ensureEntryIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + entry_ = new java.util.ArrayList(entry_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> entryBuilder_; + + public java.util.List getEntryList() { + if (entryBuilder_ == null) { + return java.util.Collections.unmodifiableList(entry_); + } else { + return entryBuilder_.getMessageList(); + } + } + public int getEntryCount() { + if (entryBuilder_ == null) { + return entry_.size(); + } else { + return entryBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) { + if (entryBuilder_ == null) { + return entry_.get(index); + 
} else { + return entryBuilder_.getMessage(index); + } + } + public Builder setEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { + if (entryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEntryIsMutable(); + entry_.set(index, value); + onChanged(); + } else { + entryBuilder_.setMessage(index, value); + } + return this; + } + public Builder setEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { + if (entryBuilder_ == null) { + ensureEntryIsMutable(); + entry_.set(index, builderForValue.build()); + onChanged(); + } else { + entryBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { + if (entryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEntryIsMutable(); + entry_.add(value); + onChanged(); + } else { + entryBuilder_.addMessage(value); + } + return this; + } + public Builder addEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { + if (entryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEntryIsMutable(); + entry_.add(index, value); + onChanged(); + } else { + entryBuilder_.addMessage(index, value); + } + return this; + } + public Builder addEntry( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { + if (entryBuilder_ == null) { + ensureEntryIsMutable(); + entry_.add(builderForValue.build()); + onChanged(); + } else { + entryBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { + if (entryBuilder_ == null) { + ensureEntryIsMutable(); + entry_.add(index, builderForValue.build()); + onChanged(); + } else { + entryBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllEntry( + java.lang.Iterable values) { + if (entryBuilder_ == null) { + ensureEntryIsMutable(); + super.addAll(values, entry_); + onChanged(); + } else { + entryBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearEntry() { + if (entryBuilder_ == null) { + entry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + entryBuilder_.clear(); + } + return this; + } + public Builder removeEntry(int index) { + if (entryBuilder_ == null) { + ensureEntryIsMutable(); + entry_.remove(index); + onChanged(); + } else { + entryBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getEntryBuilder( + int index) { + return getEntryFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( + int index) { + if (entryBuilder_ == null) { + return entry_.get(index); } else { + return entryBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getEntryOrBuilderList() { + if (entryBuilder_ != null) { + return entryBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(entry_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder() { + return getEntryFieldBuilder().addBuilder( + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder( + int index) { + return getEntryFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); + } + public java.util.List + getEntryBuilderList() { + return getEntryFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> + getEntryFieldBuilder() { + if (entryBuilder_ == null) { + entryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>( + entry_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + entry_ = null; + } + return entryBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest) + } + + static { + defaultInstance = new ReplicateWALEntryRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest) + } + + public interface ReplicateWALEntryResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ReplicateWALEntryResponse extends + com.google.protobuf.GeneratedMessage + implements ReplicateWALEntryResponseOrBuilder { + // Use ReplicateWALEntryResponse.newBuilder() to construct. + private ReplicateWALEntryResponse(Builder builder) { + super(builder); + } + private ReplicateWALEntryResponse(boolean noInit) {} + + private static final ReplicateWALEntryResponse defaultInstance; + public static ReplicateWALEntryResponse getDefaultInstance() { + return defaultInstance; + } + + public ReplicateWALEntryResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean 
equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if 
(other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse) + } + + static { + defaultInstance = new ReplicateWALEntryResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicateWALEntryResponse) + } + + public interface RollWALWriterRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class RollWALWriterRequest extends + com.google.protobuf.GeneratedMessage + implements RollWALWriterRequestOrBuilder { + // Use RollWALWriterRequest.newBuilder() to construct. 
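// [Editor's note; illustrative only] RollWALWriterRequest declares no fields, so a
// caller can simply reuse the shared default instance instead of allocating a builder:
//
//   RollWALWriterRequest request = RollWALWriterRequest.getDefaultInstance();
//   // equivalent but allocates a builder: RollWALWriterRequest.newBuilder().build()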
+ private RollWALWriterRequest(Builder builder) { + super(builder); + } + private RollWALWriterRequest(boolean noInit) {} + + private static final RollWALWriterRequest defaultInstance; + public static RollWALWriterRequest getDefaultInstance() { + return defaultInstance; + } + + public RollWALWriterRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + 
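// [Editor's sketch; hypothetical stream handling, not generated code] The
// parseDelimitedFrom variants above pair with MessageLite.writeDelimitedTo to stream
// several length-prefixed messages over one InputStream; they return null on clean
// end-of-stream:
//
//   java.io.InputStream in = openRequestStream(); // hypothetical source
//   RollWALWriterRequest next;
//   while ((next = RollWALWriterRequest.parseDelimitedFrom(in)) != null) {
//     // handle one request
//   }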
return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:RollWALWriterRequest) + } + + static { + defaultInstance = new RollWALWriterRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RollWALWriterRequest) + } + + public interface RollWALWriterResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes regionToFlush = 1; + java.util.List getRegionToFlushList(); + int getRegionToFlushCount(); + com.google.protobuf.ByteString getRegionToFlush(int index); + } + public static final class RollWALWriterResponse extends + com.google.protobuf.GeneratedMessage + implements 
RollWALWriterResponseOrBuilder { + // Use RollWALWriterResponse.newBuilder() to construct. + private RollWALWriterResponse(Builder builder) { + super(builder); + } + private RollWALWriterResponse(boolean noInit) {} + + private static final RollWALWriterResponse defaultInstance; + public static RollWALWriterResponse getDefaultInstance() { + return defaultInstance; + } + + public RollWALWriterResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + } + + // repeated bytes regionToFlush = 1; + public static final int REGIONTOFLUSH_FIELD_NUMBER = 1; + private java.util.List regionToFlush_; + public java.util.List + getRegionToFlushList() { + return regionToFlush_; + } + public int getRegionToFlushCount() { + return regionToFlush_.size(); + } + public com.google.protobuf.ByteString getRegionToFlush(int index) { + return regionToFlush_.get(index); + } + + private void initFields() { + regionToFlush_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < regionToFlush_.size(); i++) { + output.writeBytes(1, regionToFlush_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < regionToFlush_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(regionToFlush_.get(i)); + } + size += dataSize; + size += 1 * getRegionToFlushList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj; + + boolean result = true; + result = result && getRegionToFlushList() + .equals(other.getRegionToFlushList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionToFlushCount() > 0) { + hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER; + hash = (53 * hash) + getRegionToFlushList().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + 
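// [Editor's sketch; caller code, not generated] Consuming the repeated
// `regionToFlush` field of a RollWALWriterResponse; each element is an encoded
// region name that should be flushed so the rolled WAL can be archived:
//
//   RollWALWriterResponse response = rollWALWriterRpc(); // hypothetical RPC call
//   for (com.google.protobuf.ByteString region : response.getRegionToFlushList()) {
//     byte[] regionName = region.toByteArray();
//     // schedule a flush for regionName (application-specific)
//   }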
return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionToFlush_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionToFlush_ = regionToFlush_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; + if (!other.regionToFlush_.isEmpty()) { + if (regionToFlush_.isEmpty()) { + regionToFlush_ = other.regionToFlush_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionToFlushIsMutable(); + regionToFlush_.addAll(other.regionToFlush_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureRegionToFlushIsMutable(); + regionToFlush_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes regionToFlush = 1; + private java.util.List regionToFlush_ = java.util.Collections.emptyList();; + private void ensureRegionToFlushIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = new java.util.ArrayList(regionToFlush_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getRegionToFlushList() { + return java.util.Collections.unmodifiableList(regionToFlush_); + } + public int getRegionToFlushCount() { + return regionToFlush_.size(); + } + public com.google.protobuf.ByteString getRegionToFlush(int index) { + return regionToFlush_.get(index); + } + public Builder setRegionToFlush( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionToFlushIsMutable(); + regionToFlush_.set(index, value); + onChanged(); + return this; + } + public Builder addRegionToFlush(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionToFlushIsMutable(); + regionToFlush_.add(value); + onChanged(); + return this; + } + public Builder addAllRegionToFlush( + java.lang.Iterable values) { + ensureRegionToFlushIsMutable(); + super.addAll(values, regionToFlush_); + onChanged(); + return this; + } + public Builder clearRegionToFlush() { + regionToFlush_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RollWALWriterResponse) + } + + static { + defaultInstance = new RollWALWriterResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RollWALWriterResponse) + } + + public interface StopServerRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string reason = 1; + boolean hasReason(); + String getReason(); + } + public static final class StopServerRequest extends + com.google.protobuf.GeneratedMessage + implements StopServerRequestOrBuilder { + // Use StopServerRequest.newBuilder() to construct. 
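// [Editor's sketch; illustrative, not emitted by protoc] `reason` is the message's
// only field and it is required, so build() throws an UninitializedMessageException
// when it is left unset:
//
//   StopServerRequest request = StopServerRequest.newBuilder()
//       .setReason("graceful shutdown for maintenance") // required string reason = 1
//       .build();
//   assert request.hasReason();
//   // StopServerRequest.newBuilder().build() would throw here: reason is unset.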
+ private StopServerRequest(Builder builder) { + super(builder); + } + private StopServerRequest(boolean noInit) {} + + private static final StopServerRequest defaultInstance; + public static StopServerRequest getDefaultInstance() { + return defaultInstance; + } + + public StopServerRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + } + + private int bitField0_; + // required string reason = 1; + public static final int REASON_FIELD_NUMBER = 1; + private java.lang.Object reason_; + public boolean hasReason() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getReason() { + java.lang.Object ref = reason_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + reason_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getReasonBytes() { + java.lang.Object ref = reason_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + reason_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + reason_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasReason()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getReasonBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getReasonBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj; + + boolean result = true; + result = result && (hasReason() == other.hasReason()); + if (hasReason()) { + result = result && getReason() + .equals(other.getReason()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + 
return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasReason()) { + hash = (37 * hash) + REASON_FIELD_NUMBER; + hash = (53 * hash) + getReason().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) { + return 
newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + reason_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.reason_ = reason_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this; + if (other.hasReason()) { + setReason(other.getReason()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasReason()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + reason_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string reason = 1; + private java.lang.Object reason_ = ""; + public boolean hasReason() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getReason() { + java.lang.Object ref = reason_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + reason_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setReason(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + reason_ = value; + onChanged(); + return this; + } + public Builder clearReason() { + bitField0_ = (bitField0_ & ~0x00000001); + reason_ = getDefaultInstance().getReason(); + onChanged(); + return this; + } + void setReason(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + reason_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:StopServerRequest) + } + + static { + defaultInstance = new StopServerRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopServerRequest) + } + + public interface StopServerResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class StopServerResponse extends + com.google.protobuf.GeneratedMessage + implements StopServerResponseOrBuilder { + // Use StopServerResponse.newBuilder() to construct. 
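For reference, the StopServerRequest message completed above can be exercised as follows. This is an illustrative sketch, not part of the generated file: the demo class name and the sample reason string are invented, while newBuilder(), setReason(), build(), parseFrom(), and hasReason() come from the generated code itself.

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;

public class StopServerRequestDemo {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // build() enforces the required field: omitting setReason(...) would
    // leave isInitialized() false and make build() throw.
    StopServerRequest req = StopServerRequest.newBuilder()
        .setReason("rolling restart")   // required string reason = 1
        .build();

    // Round-trip through the wire format via the generated parseFrom().
    StopServerRequest parsed = StopServerRequest.parseFrom(req.toByteArray());
    assert parsed.hasReason() && "rolling restart".equals(parsed.getReason());
  }
}
```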
+ private StopServerResponse(Builder builder) { + super(builder); + } + private StopServerResponse(boolean noInit) {} + + private static final StopServerResponse defaultInstance; + public static StopServerResponse getDefaultInstance() { + return defaultInstance; + } + + public StopServerResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + 
return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:StopServerResponse) + } + + static { + defaultInstance = new StopServerResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopServerResponse) + } + + public interface GetServerInfoRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GetServerInfoRequest extends + com.google.protobuf.GeneratedMessage + implements GetServerInfoRequestOrBuilder { + // Use GetServerInfoRequest.newBuilder() to construct. 
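StopServerResponse above declares no fields, the usual protobuf idiom for acknowledgement-only RPC responses. A minimal sketch (illustrative only; the class name is invented) of why the shared default instance suffices in place of building fresh objects:

```java
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse;

public class EmptyResponseDemo {
  public static void main(String[] args) {
    // With no declared fields, the singleton default instance is always
    // initialized and serializes to zero bytes (unknown fields aside).
    StopServerResponse ack = StopServerResponse.getDefaultInstance();
    assert ack.isInitialized();
    assert ack.getSerializedSize() == 0;
  }
}
```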
+ private GetServerInfoRequest(Builder builder) { + super(builder); + } + private GetServerInfoRequest(boolean noInit) {} + + private static final GetServerInfoRequest defaultInstance; + public static GetServerInfoRequest getDefaultInstance() { + return defaultInstance; + } + + public GetServerInfoRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + 
return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetServerInfoRequest) + } + + static { + defaultInstance = new GetServerInfoRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetServerInfoRequest) + } + + public interface ServerInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName serverName = 1; + boolean hasServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); + + // optional uint32 webuiPort = 2; + boolean 
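The parseDelimitedFrom variants generated above return null once mergeDelimitedFrom reports end-of-stream, which makes them convenient for reading a sequence of length-prefixed messages. A sketch, assuming the stream was written with the matching writeDelimitedTo (the file name and demo class are hypothetical):

```java
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest;

public class DelimitedReadDemo {
  public static void main(String[] args) throws IOException {
    FileInputStream in = new FileInputStream("requests.bin");  // hypothetical input
    try {
      GetServerInfoRequest req;
      // null signals end-of-stream rather than a parse error.
      while ((req = GetServerInfoRequest.parseDelimitedFrom(in)) != null) {
        System.out.println("read one framed request");
      }
    } finally {
      in.close();
    }
  }
}
```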
hasWebuiPort(); + int getWebuiPort(); + } + public static final class ServerInfo extends + com.google.protobuf.GeneratedMessage + implements ServerInfoOrBuilder { + // Use ServerInfo.newBuilder() to construct. + private ServerInfo(Builder builder) { + super(builder); + } + private ServerInfo(boolean noInit) {} + + private static final ServerInfo defaultInstance; + public static ServerInfo getDefaultInstance() { + return defaultInstance; + } + + public ServerInfo getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName serverName = 1; + public static final int SERVERNAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + return serverName_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + return serverName_; + } + + // optional uint32 webuiPort = 2; + public static final int WEBUIPORT_FIELD_NUMBER = 2; + private int webuiPort_; + public boolean hasWebuiPort() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getWebuiPort() { + return webuiPort_; + } + + private void initFields() { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + webuiPort_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServerName()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, serverName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, webuiPort_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, serverName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, webuiPort_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) obj; + + boolean result = true; + result = result && (hasServerName() == other.hasServerName()); + if (hasServerName()) { + result = result && getServerName() + .equals(other.getServerName()); + } + result = result && (hasWebuiPort() == other.hasWebuiPort()); + if (hasWebuiPort()) { + result = result && (getWebuiPort() + == other.getWebuiPort()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServerName()) { + hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; + hash = (53 * hash) + getServerName().hashCode(); + } + if (hasWebuiPort()) { + hash = (37 * hash) + WEBUIPORT_FIELD_NUMBER; + hash = (53 * hash) + getWebuiPort(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo 
parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + webuiPort_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); 
+ } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverNameBuilder_ == null) { + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.webuiPort_ = webuiPort_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) return this; + if (other.hasServerName()) { + mergeServerName(other.getServerName()); + } + if (other.hasWebuiPort()) { + setWebuiPort(other.getWebuiPort()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServerName()) { + + return false; + } + if (!getServerName().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServerName()) { + subBuilder.mergeFrom(getServerName()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerName(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + webuiPort_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName serverName = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + if (serverNameBuilder_ == null) { + 
return serverName_; + } else { + return serverNameBuilder_.getMessage(); + } + } + public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverName_ = value; + onChanged(); + } else { + serverNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + serverName_ = builderForValue.build(); + onChanged(); + } else { + serverNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + serverName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); + } else { + serverName_ = value; + } + onChanged(); + } else { + serverNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerNameFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilder(); + } else { + return serverName_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + serverName_, + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + // optional uint32 webuiPort = 2; + private int webuiPort_ ; + public boolean hasWebuiPort() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getWebuiPort() { + return webuiPort_; + } + public Builder setWebuiPort(int value) { + bitField0_ |= 0x00000002; + webuiPort_ = value; + onChanged(); + return this; + } + public Builder clearWebuiPort() { + bitField0_ = (bitField0_ & ~0x00000002); + webuiPort_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ServerInfo) + } + + static { + defaultInstance = new ServerInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ServerInfo) + } + + public interface 
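ServerInfo above pairs a required nested ServerName with an optional webuiPort, and its isInitialized() recurses into the nested message. A sketch of those required-field semantics (illustrative only: the host and port values are invented, and setHostName/getHostName are assumed accessors of the ServerName message in HBaseProtos, which may name them differently):

```java
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;

public class ServerInfoDemo {
  public static void main(String[] args) {
    ServerInfo.Builder b = ServerInfo.newBuilder().setWebuiPort(60030);

    // serverName is required, so the builder stays uninitialized until it
    // is set; calling build() at this point would throw.
    assert !b.isInitialized();

    b.setServerName(ServerName.newBuilder()
        .setHostName("rs1.example.com")   // assumed ServerName accessor
        .build());
    assert b.isInitialized();             // nested message is now complete

    ServerInfo info = b.build();
    System.out.println(info.getServerName().getHostName() + ":" + info.getWebuiPort());
  }
}
```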
GetServerInfoResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerInfo serverInfo = 1; + boolean hasServerInfo(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder(); + } + public static final class GetServerInfoResponse extends + com.google.protobuf.GeneratedMessage + implements GetServerInfoResponseOrBuilder { + // Use GetServerInfoResponse.newBuilder() to construct. + private GetServerInfoResponse(Builder builder) { + super(builder); + } + private GetServerInfoResponse(boolean noInit) {} + + private static final GetServerInfoResponse defaultInstance; + public static GetServerInfoResponse getDefaultInstance() { + return defaultInstance; + } + + public GetServerInfoResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerInfo serverInfo = 1; + public static final int SERVERINFO_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_; + public boolean hasServerInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { + return serverInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { + return serverInfo_; + } + + private void initFields() { + serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServerInfo()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerInfo().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, serverInfo_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, serverInfo_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)) { + return 
super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj; + + boolean result = true; + result = result && (hasServerInfo() == other.hasServerInfo()); + if (hasServerInfo()) { + result = result && getServerInfo() + .equals(other.getServerInfo()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServerInfo()) { + hash = (37 * hash) + SERVERINFO_FIELD_NUMBER; + hash = (53 * hash) + getServerInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse 
parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverInfoBuilder_ == null) { + serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + } else { + serverInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverInfoBuilder_ == null) { + result.serverInfo_ = serverInfo_; + } else { + result.serverInfo_ = serverInfoBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; + if (other.hasServerInfo()) { + mergeServerInfo(other.getServerInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServerInfo()) { + + return false; + } + if (!getServerInfo().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(); + if (hasServerInfo()) { + subBuilder.mergeFrom(getServerInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerInfo serverInfo = 1; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_; + public boolean hasServerInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { + if (serverInfoBuilder_ == null) { + return serverInfo_; + } else { + return serverInfoBuilder_.getMessage(); + } + } + public Builder setServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) { + if (serverInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverInfo_ = value; + onChanged(); + } else { + 
serverInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServerInfo( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder builderForValue) { + if (serverInfoBuilder_ == null) { + serverInfo_ = builderForValue.build(); + onChanged(); + } else { + serverInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) { + if (serverInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + serverInfo_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) { + serverInfo_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(serverInfo_).mergeFrom(value).buildPartial(); + } else { + serverInfo_ = value; + } + onChanged(); + } else { + serverInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServerInfo() { + if (serverInfoBuilder_ == null) { + serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + onChanged(); + } else { + serverInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder getServerInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { + if (serverInfoBuilder_ != null) { + return serverInfoBuilder_.getMessageOrBuilder(); + } else { + return serverInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> + getServerInfoFieldBuilder() { + if (serverInfoBuilder_ == null) { + serverInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder>( + serverInfo_, + getParentForChildren(), + isClean()); + serverInfo_ = null; + } + return serverInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetServerInfoResponse) + } + + static { + defaultInstance = new GetServerInfoResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetServerInfoResponse) + } + + public static abstract class AdminService + implements com.google.protobuf.Service { + protected AdminService() {} + + public interface Interface { + public abstract void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getStoreFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); 
+ + public abstract void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done); + + public abstract void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done); + + public abstract void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done); + + public abstract void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done); + + public abstract void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done); + + public abstract void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done); + + public abstract void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done); + + public abstract void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done); + + public abstract void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new AdminService() { + @java.lang.Override + public void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) { + impl.getRegionInfo(controller, request, done); + } + + @java.lang.Override + public void getStoreFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done) { + impl.getStoreFile(controller, request, done); + } + + @java.lang.Override + public void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done) { + impl.getOnlineRegion(controller, request, done); + } + + @java.lang.Override + public void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done) { + impl.openRegion(controller, request, done); + } + + @java.lang.Override + public void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done) { + impl.closeRegion(controller, request, done); + } + + @java.lang.Override + public void flushRegion( + com.google.protobuf.RpcController
controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done) { + impl.flushRegion(controller, request, done); + } + + @java.lang.Override + public void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done) { + impl.splitRegion(controller, request, done); + } + + @java.lang.Override + public void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done) { + impl.compactRegion(controller, request, done); + } + + @java.lang.Override + public void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) { + impl.replicateWALEntry(controller, request, done); + } + + @java.lang.Override + public void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done) { + impl.rollWALWriter(controller, request, done); + } + + @java.lang.Override + public void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done) { + impl.getServerInfo(controller, request, done); + } + + @java.lang.Override + public void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done) { + impl.stopServer(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request); + case 1: + return impl.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request); + case 2: + return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request); + case 3: + return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request); + case 4: + return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request); + case 5: + return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request); + case 6: + return impl.splitRegion(controller,
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request); + case 7: + return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request); + case 8: + return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request); + case 9: + return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request); + case 10: + return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request); + case 11: + return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); + case 4: + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done); + + public abstract void getStoreFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done); + + public abstract void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done); + + public abstract void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done); + + public abstract void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done); + + public abstract void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done); + + public abstract void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done); + + public abstract void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done); + + public abstract void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done); + + public abstract void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done); + + public abstract void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done); + + public abstract void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, +
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse>specializeCallback( + done)); + return; + case 1: + this.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse>specializeCallback( + done)); + return; + case 2: + this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse>specializeCallback( + done)); + return; + case 3: + this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse>specializeCallback( + done)); + return; + case 4: + this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse>specializeCallback( + done)); + return; + case 5: + this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse>specializeCallback( + done)); + return; + case 6: + this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse>specializeCallback( + done)); + return; + case 7: + this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse>specializeCallback( + done)); + return; + case 8: + this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse>specializeCallback( + done)); + return; + case 9: + this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse>specializeCallback( + done)); + return; + case 10: + this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse>specializeCallback( + done)); + return; + case 11: + this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse>specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method "
+ + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance())); + } + + public void getStoreFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance())); + } + + public void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance())); + } + + public void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance())); + } + + public void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance())); + } + + public void flushRegion(
+ com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance())); + } + + public void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance())); + } + + public void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance())); + } + + public void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); + } + + public void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance())); + } + + public void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(10), + controller, + request, +
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance())); + } + + public void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( +
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()); + } + + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRegionInfoRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRegionInfoRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRegionInfoResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRegionInfoResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetStoreFileRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetStoreFileRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetStoreFileResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetStoreFileResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetOnlineRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetOnlineRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetOnlineRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetOnlineRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CloseRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_CloseRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CloseRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CloseRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FlushRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FlushRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FlushRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FlushRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SplitRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SplitRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UUID_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UUID_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALEdit_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALEdit_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicateWALEntryRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicateWALEntryResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RollWALWriterRequest_descriptor; + private 
static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RollWALWriterRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RollWALWriterResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RollWALWriterResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopServerRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopServerRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopServerResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopServerResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetServerInfoRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetServerInfoRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ServerInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ServerInfo_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetServerInfoResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetServerInfoResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\013Admin.proto\032\013hbase.proto\"Q\n\024GetRegionI" + + "nfoRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + + "fier\022\027\n\017compactionState\030\002 \001(\010\"\301\001\n\025GetReg" + + "ionInfoResponse\022\037\n\nregionInfo\030\001 \002(\0132\013.Re" + + "gionInfo\022?\n\017compactionState\030\002 \001(\0162&.GetR" + + "egionInfoResponse.CompactionState\"F\n\017Com" + + "pactionState\022\010\n\004NONE\020\000\022\t\n\005MINOR\020\001\022\t\n\005MAJ" + + "OR\020\002\022\023\n\017MAJOR_AND_MINOR\020\003\"G\n\023GetStoreFil" + + "eRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + + "er\022\016\n\006family\030\002 \003(\014\")\n\024GetStoreFileRespon", + "se\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetOnlineRegion" + + "Request\":\n\027GetOnlineRegionResponse\022\037\n\nre" + + "gionInfo\030\001 \003(\0132\013.RegionInfo\"\225\001\n\021OpenRegi" + + "onRequest\0223\n\010openInfo\030\001 \003(\0132!.OpenRegion" + + "Request.RegionOpenInfo\032K\n\016RegionOpenInfo" + + "\022\033\n\006region\030\001 \002(\0132\013.RegionInfo\022\034\n\024version" + + "OfOfflineNode\030\002 \001(\r\"\234\001\n\022OpenRegionRespon" + + "se\022<\n\014openingState\030\001 \003(\0162&.OpenRegionRes" + + "ponse.RegionOpeningState\"H\n\022RegionOpenin" + + "gState\022\n\n\006OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022", + "\n\016FAILED_OPENING\020\002\"\232\001\n\022CloseRegionReques" + + "t\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024v" + + "ersionOfClosingNode\030\002 \001(\r\022\034\n\016transitionI" + + "nZK\030\003 \001(\010:\004true\022&\n\021destinationServer\030\004 \001" + + "(\0132\013.ServerName\"%\n\023CloseRegionResponse\022\016" + + 
"\n\006closed\030\001 \002(\010\"M\n\022FlushRegionRequest\022 \n\006" + + "region\030\001 \002(\0132\020.RegionSpecifier\022\025\n\rifOlde" + + "rThanTs\030\002 \001(\004\"=\n\023FlushRegionResponse\022\025\n\r" + + "lastFlushTime\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022" + + "SplitRegionRequest\022 \n\006region\030\001 \002(\0132\020.Reg", + "ionSpecifier\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023Spli" + + "tRegionResponse\"W\n\024CompactRegionRequest\022" + + " \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\r\n\005maj" + + "or\030\002 \001(\010\022\016\n\006family\030\003 \001(\014\"\027\n\025CompactRegio" + + "nResponse\"1\n\004UUID\022\024\n\014leastSigBits\030\001 \002(\004\022" + + "\023\n\013mostSigBits\030\002 \002(\004\"\270\003\n\010WALEntry\022\035\n\003key" + + "\030\001 \002(\0132\020.WALEntry.WALKey\022\037\n\004edit\030\002 \002(\0132\021" + + ".WALEntry.WALEdit\032~\n\006WALKey\022\031\n\021encodedRe" + + "gionName\030\001 \002(\014\022\021\n\ttableName\030\002 \002(\014\022\031\n\021log" + + "SequenceNumber\030\003 \002(\004\022\021\n\twriteTime\030\004 \002(\004\022", + "\030\n\tclusterId\030\005 \001(\0132\005.UUID\032\353\001\n\007WALEdit\022\025\n" + + "\rkeyValueBytes\030\001 \003(\014\0222\n\013familyScope\030\002 \003(" + + "\0132\035.WALEntry.WALEdit.FamilyScope\032M\n\013Fami" + + "lyScope\022\016\n\006family\030\001 \002(\014\022.\n\tscopeType\030\002 \002" + + "(\0162\033.WALEntry.WALEdit.ScopeType\"F\n\tScope" + + "Type\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030REP" + + "LICATION_SCOPE_GLOBAL\020\001\"4\n\030ReplicateWALE" + + "ntryRequest\022\030\n\005entry\030\001 \003(\0132\t.WALEntry\"\033\n" + + "\031ReplicateWALEntryResponse\"\026\n\024RollWALWri" + + "terRequest\".\n\025RollWALWriterResponse\022\025\n\rr", + "egionToFlush\030\001 \003(\014\"#\n\021StopServerRequest\022" + + "\016\n\006reason\030\001 \002(\t\"\024\n\022StopServerResponse\"\026\n" + + "\024GetServerInfoRequest\"@\n\nServerInfo\022\037\n\ns" + + "erverName\030\001 \002(\0132\013.ServerName\022\021\n\twebuiPor" + + "t\030\002 \001(\r\"8\n\025GetServerInfoResponse\022\037\n\nserv" + + "erInfo\030\001 \002(\0132\013.ServerInfo2\371\005\n\014AdminServi" + + "ce\022>\n\rgetRegionInfo\022\025.GetRegionInfoReque" + + "st\032\026.GetRegionInfoResponse\022;\n\014getStoreFi" + + "le\022\024.GetStoreFileRequest\032\025.GetStoreFileR" + + "esponse\022D\n\017getOnlineRegion\022\027.GetOnlineRe", + "gionRequest\032\030.GetOnlineRegionResponse\0225\n" + + "\nopenRegion\022\022.OpenRegionRequest\032\023.OpenRe" + + "gionResponse\0228\n\013closeRegion\022\023.CloseRegio" + + "nRequest\032\024.CloseRegionResponse\0228\n\013flushR" + + "egion\022\023.FlushRegionRequest\032\024.FlushRegion" + + "Response\0228\n\013splitRegion\022\023.SplitRegionReq" + + "uest\032\024.SplitRegionResponse\022>\n\rcompactReg" + + "ion\022\025.CompactRegionRequest\032\026.CompactRegi" + + "onResponse\022J\n\021replicateWALEntry\022\031.Replic" + + "ateWALEntryRequest\032\032.ReplicateWALEntryRe", + "sponse\022>\n\rrollWALWriter\022\025.RollWALWriterR" + + "equest\032\026.RollWALWriterResponse\022>\n\rgetSer" + + "verInfo\022\025.GetServerInfoRequest\032\026.GetServ" + + "erInfoResponse\0225\n\nstopServer\022\022.StopServe" + + "rRequest\032\023.StopServerResponseBA\n*org.apa" + + "che.hadoop.hbase.protobuf.generatedB\013Adm" + + "inProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_GetRegionInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_GetRegionInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRegionInfoRequest_descriptor, + new java.lang.String[] { "Region", "CompactionState", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); + internal_static_GetRegionInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_GetRegionInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRegionInfoResponse_descriptor, + new java.lang.String[] { "RegionInfo", "CompactionState", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); + internal_static_GetStoreFileRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_GetStoreFileRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetStoreFileRequest_descriptor, + new java.lang.String[] { "Region", "Family", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class); + internal_static_GetStoreFileResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GetStoreFileResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetStoreFileResponse_descriptor, + new java.lang.String[] { "StoreFile", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class); + internal_static_GetOnlineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GetOnlineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetOnlineRegionRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class); + internal_static_GetOnlineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_GetOnlineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetOnlineRegionResponse_descriptor, + new java.lang.String[] { "RegionInfo", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); + internal_static_OpenRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_OpenRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionRequest_descriptor, + new java.lang.String[] { "OpenInfo", }, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); + internal_static_OpenRegionRequest_RegionOpenInfo_descriptor = + internal_static_OpenRegionRequest_descriptor.getNestedTypes().get(0); + internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionRequest_RegionOpenInfo_descriptor, + new java.lang.String[] { "Region", "VersionOfOfflineNode", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); + internal_static_OpenRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_OpenRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionResponse_descriptor, + new java.lang.String[] { "OpeningState", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); + internal_static_CloseRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_CloseRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CloseRegionRequest_descriptor, + new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); + internal_static_CloseRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_CloseRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CloseRegionResponse_descriptor, + new java.lang.String[] { "Closed", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); + internal_static_FlushRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_FlushRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FlushRegionRequest_descriptor, + new java.lang.String[] { "Region", "IfOlderThanTs", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); + internal_static_FlushRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_FlushRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FlushRegionResponse_descriptor, + new java.lang.String[] { "LastFlushTime", "Flushed", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); + internal_static_SplitRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_SplitRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitRegionRequest_descriptor, + new 
java.lang.String[] { "Region", "SplitPoint", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); + internal_static_SplitRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_SplitRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); + internal_static_CompactRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_CompactRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactRegionRequest_descriptor, + new java.lang.String[] { "Region", "Major", "Family", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); + internal_static_CompactRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_CompactRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); + internal_static_UUID_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_UUID_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UUID_descriptor, + new java.lang.String[] { "LeastSigBits", "MostSigBits", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); + internal_static_WALEntry_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_WALEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_descriptor, + new java.lang.String[] { "Key", "Edit", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); + internal_static_WALEntry_WALKey_descriptor = + internal_static_WALEntry_descriptor.getNestedTypes().get(0); + internal_static_WALEntry_WALKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALKey_descriptor, + new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); + internal_static_WALEntry_WALEdit_descriptor = + internal_static_WALEntry_descriptor.getNestedTypes().get(1); + internal_static_WALEntry_WALEdit_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALEdit_descriptor, + new java.lang.String[] { "KeyValueBytes", "FamilyScope", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); + internal_static_WALEntry_WALEdit_FamilyScope_descriptor = + internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); + internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALEdit_FamilyScope_descriptor, + new java.lang.String[] { "Family", "ScopeType", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); + internal_static_ReplicateWALEntryRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicateWALEntryRequest_descriptor, + new java.lang.String[] { "Entry", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); + internal_static_ReplicateWALEntryResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicateWALEntryResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); + internal_static_RollWALWriterRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_RollWALWriterRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RollWALWriterRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); + internal_static_RollWALWriterResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_RollWALWriterResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RollWALWriterResponse_descriptor, + new java.lang.String[] { "RegionToFlush", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); + internal_static_StopServerRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_StopServerRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopServerRequest_descriptor, + new java.lang.String[] { "Reason", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); + internal_static_StopServerResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_StopServerResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopServerResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class);
+ internal_static_GetServerInfoRequest_descriptor =
+ getDescriptor().getMessageTypes().get(24);
+ internal_static_GetServerInfoRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetServerInfoRequest_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class);
+ internal_static_ServerInfo_descriptor =
+ getDescriptor().getMessageTypes().get(25);
+ internal_static_ServerInfo_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_ServerInfo_descriptor,
+ new java.lang.String[] { "ServerName", "WebuiPort", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class);
+ internal_static_GetServerInfoResponse_descriptor =
+ getDescriptor().getMessageTypes().get(26);
+ internal_static_GetServerInfoResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetServerInfoResponse_descriptor,
+ new java.lang.String[] { "ServerInfo", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
new file mode 100644
index 0000000..ad1dbeb
--- /dev/null
+++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
@@ -0,0 +1,1829 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Aggregate.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AggregateProtos {
+ private AggregateProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface AggregateArgumentOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string interpreterClassName = 1;
+ boolean hasInterpreterClassName();
+ String getInterpreterClassName();
+
+ // required .Scan scan = 2;
+ boolean hasScan();
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+ // optional bytes interpreterSpecificBytes = 3;
+ boolean hasInterpreterSpecificBytes();
+ com.google.protobuf.ByteString getInterpreterSpecificBytes();
+ }
+ public static final class AggregateArgument extends
+ com.google.protobuf.GeneratedMessage
+ implements AggregateArgumentOrBuilder {
+ // Use AggregateArgument.newBuilder() to construct.
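/*
 * The assigner above is what ties each generated message class to its Descriptor
 * and FieldAccessorTable when the file descriptor is built; that wiring is what
 * makes reflective field access possible. A minimal sketch using only the
 * standard protobuf-2.x Descriptors API (the lower-cased proto field names are
 * inferred from the accessor-table strings, so treat the exact casing as an
 * assumption):
 *
 *   com.google.protobuf.Descriptors.Descriptor d =
 *       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor();
 *   for (com.google.protobuf.Descriptors.FieldDescriptor f : d.getFields()) {
 *     // expected output: 1 region, 2 versionOfClosingNode, 3 transitionInZK, 4 destinationServer
 *     System.out.println(f.getNumber() + " " + f.getName());
 *   }
 */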
+ private AggregateArgument(Builder builder) { + super(builder); + } + private AggregateArgument(boolean noInit) {} + + private static final AggregateArgument defaultInstance; + public static AggregateArgument getDefaultInstance() { + return defaultInstance; + } + + public AggregateArgument getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable; + } + + private int bitField0_; + // required string interpreterClassName = 1; + public static final int INTERPRETERCLASSNAME_FIELD_NUMBER = 1; + private java.lang.Object interpreterClassName_; + public boolean hasInterpreterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getInterpreterClassName() { + java.lang.Object ref = interpreterClassName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + interpreterClassName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getInterpreterClassNameBytes() { + java.lang.Object ref = interpreterClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + interpreterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required .Scan scan = 2; + public static final int SCAN_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // optional bytes interpreterSpecificBytes = 3; + public static final int INTERPRETERSPECIFICBYTES_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString interpreterSpecificBytes_; + public boolean hasInterpreterSpecificBytes() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getInterpreterSpecificBytes() { + return interpreterSpecificBytes_; + } + + private void initFields() { + interpreterClassName_ = ""; + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasInterpreterClassName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasScan()) { + memoizedIsInitialized = 0; + return false; + } + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 
0x00000001)) { + output.writeBytes(1, getInterpreterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, interpreterSpecificBytes_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getInterpreterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, interpreterSpecificBytes_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) obj; + + boolean result = true; + result = result && (hasInterpreterClassName() == other.hasInterpreterClassName()); + if (hasInterpreterClassName()) { + result = result && getInterpreterClassName() + .equals(other.getInterpreterClassName()); + } + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes()); + if (hasInterpreterSpecificBytes()) { + result = result && getInterpreterSpecificBytes() + .equals(other.getInterpreterSpecificBytes()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasInterpreterClassName()) { + hash = (37 * hash) + INTERPRETERCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getInterpreterClassName().hashCode(); + } + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasInterpreterSpecificBytes()) { + hash = (37 * hash) + INTERPRETERSPECIFICBYTES_FIELD_NUMBER; + hash = (53 * hash) + getInterpreterSpecificBytes().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
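/*
 * interpreterClassName and scan are both required, so the isInitialized() check
 * defined above gates every build() and parse. A sketch of the failure mode,
 * using UninitializedMessageException from the core protobuf runtime:
 *
 *   try {
 *     org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument
 *         .newBuilder().build();  // neither required field is set
 *   } catch (com.google.protobuf.UninitializedMessageException e) {
 *     // e.getMissingFields() lists the unset required fields
 *   }
 *
 * buildPartial() skips this check and hands back the incomplete message instead.
 */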
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgumentOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
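/*
 * The parseFrom overloads above all funnel into Builder.mergeFrom(...) followed
 * by buildParsed(). A round-trip sketch; the LongColumnInterpreter class name
 * and the assumption that a default Scan carries no unset required fields are
 * illustrative, not taken from this file:
 *
 *   AggregateProtos.AggregateArgument arg =
 *       AggregateProtos.AggregateArgument.newBuilder()
 *           .setInterpreterClassName(
 *               "org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter")
 *           .setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan
 *               .getDefaultInstance())
 *           .build();
 *   byte[] wire = arg.toByteArray();
 *   AggregateProtos.AggregateArgument back =
 *       AggregateProtos.AggregateArgument.parseFrom(wire);
 *   assert back.equals(arg);  // value-based equals(), defined earlier in this class
 */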
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + interpreterClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument build() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.interpreterClassName_ = interpreterClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.interpreterSpecificBytes_ = interpreterSpecificBytes_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance()) return this; + if (other.hasInterpreterClassName()) { + setInterpreterClassName(other.getInterpreterClassName()); + } + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasInterpreterSpecificBytes()) { + setInterpreterSpecificBytes(other.getInterpreterSpecificBytes()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasInterpreterClassName()) { + + return false; + } + if (!hasScan()) { + + return false; + } + if (!getScan().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + interpreterClassName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); + if (hasScan()) { + subBuilder.mergeFrom(getScan()); + } + input.readMessage(subBuilder, extensionRegistry); + setScan(subBuilder.buildPartial()); + break; + } + case 26: { + bitField0_ |= 0x00000004; + interpreterSpecificBytes_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string interpreterClassName = 1; + private java.lang.Object interpreterClassName_ = ""; + public boolean hasInterpreterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getInterpreterClassName() { + java.lang.Object ref = interpreterClassName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + interpreterClassName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setInterpreterClassName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + interpreterClassName_ = value; + onChanged(); + return this; + } + public Builder clearInterpreterClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + interpreterClassName_ = getDefaultInstance().getInterpreterClassName(); + onChanged(); + return this; + } + void setInterpreterClassName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + interpreterClassName_ = value; + onChanged(); + } + + // required .Scan scan = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + public boolean hasScan() { 
+ return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // optional bytes interpreterSpecificBytes = 3; + private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasInterpreterSpecificBytes() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getInterpreterSpecificBytes() { + return interpreterSpecificBytes_; + } + public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + interpreterSpecificBytes_ = value; + onChanged(); + return this; + } + public Builder clearInterpreterSpecificBytes() { + bitField0_ = (bitField0_ & ~0x00000004); + interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes(); + 
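/*
 * Note the asymmetry in the scan accessors above: setScan() replaces the
 * sub-message wholesale, while mergeScan() folds the incoming fields into any
 * scan already present (standard protobuf semantics for singular sub-messages).
 * Sketch, with base and overlayScan as hypothetical stand-ins:
 *
 *   AggregateProtos.AggregateArgument.Builder b = base.toBuilder();
 *   b.mergeScan(overlayScan);  // fields set in overlayScan win; unset ones keep base's values
 *   b.setScan(overlayScan);    // wholesale replacement, for contrast
 */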
onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:AggregateArgument) + } + + static { + defaultInstance = new AggregateArgument(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AggregateArgument) + } + + public interface AggregateResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes firstPart = 1; + java.util.List getFirstPartList(); + int getFirstPartCount(); + com.google.protobuf.ByteString getFirstPart(int index); + + // optional bytes secondPart = 2; + boolean hasSecondPart(); + com.google.protobuf.ByteString getSecondPart(); + } + public static final class AggregateResponse extends + com.google.protobuf.GeneratedMessage + implements AggregateResponseOrBuilder { + // Use AggregateResponse.newBuilder() to construct. + private AggregateResponse(Builder builder) { + super(builder); + } + private AggregateResponse(boolean noInit) {} + + private static final AggregateResponse defaultInstance; + public static AggregateResponse getDefaultInstance() { + return defaultInstance; + } + + public AggregateResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable; + } + + private int bitField0_; + // repeated bytes firstPart = 1; + public static final int FIRSTPART_FIELD_NUMBER = 1; + private java.util.List firstPart_; + public java.util.List + getFirstPartList() { + return firstPart_; + } + public int getFirstPartCount() { + return firstPart_.size(); + } + public com.google.protobuf.ByteString getFirstPart(int index) { + return firstPart_.get(index); + } + + // optional bytes secondPart = 2; + public static final int SECONDPART_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString secondPart_; + public boolean hasSecondPart() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getSecondPart() { + return secondPart_; + } + + private void initFields() { + firstPart_ = java.util.Collections.emptyList();; + secondPart_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < firstPart_.size(); i++) { + output.writeBytes(1, firstPart_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(2, secondPart_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < firstPart_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(firstPart_.get(i)); + } + size += dataSize; + size += 1 * getFirstPartList().size(); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBytesSize(2, secondPart_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj; + + boolean result = true; + result = result && getFirstPartList() + .equals(other.getFirstPartList()); + result = result && (hasSecondPart() == other.hasSecondPart()); + if (hasSecondPart()) { + result = result && getSecondPart() + .equals(other.getSecondPart()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFirstPartCount() > 0) { + hash = (37 * hash) + FIRSTPART_FIELD_NUMBER; + hash = (53 * hash) + getFirstPartList().hashCode(); + } + if (hasSecondPart()) { + hash = (37 * hash) + SECONDPART_FIELD_NUMBER; + hash = (53 * hash) + getSecondPart().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + firstPart_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + secondPart_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse 
build() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + firstPart_ = java.util.Collections.unmodifiableList(firstPart_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.firstPart_ = firstPart_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.secondPart_ = secondPart_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this; + if (!other.firstPart_.isEmpty()) { + if (firstPart_.isEmpty()) { + firstPart_ = other.firstPart_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFirstPartIsMutable(); + firstPart_.addAll(other.firstPart_); + } + onChanged(); + } + if (other.hasSecondPart()) { + setSecondPart(other.getSecondPart()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureFirstPartIsMutable(); + firstPart_.add(input.readBytes()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + secondPart_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes firstPart = 1; + private java.util.List firstPart_ = java.util.Collections.emptyList();; + private void ensureFirstPartIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + firstPart_ = new java.util.ArrayList(firstPart_); + bitField0_ |= 
0x00000001; + } + } + public java.util.List + getFirstPartList() { + return java.util.Collections.unmodifiableList(firstPart_); + } + public int getFirstPartCount() { + return firstPart_.size(); + } + public com.google.protobuf.ByteString getFirstPart(int index) { + return firstPart_.get(index); + } + public Builder setFirstPart( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFirstPartIsMutable(); + firstPart_.set(index, value); + onChanged(); + return this; + } + public Builder addFirstPart(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFirstPartIsMutable(); + firstPart_.add(value); + onChanged(); + return this; + } + public Builder addAllFirstPart( + java.lang.Iterable values) { + ensureFirstPartIsMutable(); + super.addAll(values, firstPart_); + onChanged(); + return this; + } + public Builder clearFirstPart() { + firstPart_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // optional bytes secondPart = 2; + private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSecondPart() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSecondPart() { + return secondPart_; + } + public Builder setSecondPart(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + secondPart_ = value; + onChanged(); + return this; + } + public Builder clearSecondPart() { + bitField0_ = (bitField0_ & ~0x00000002); + secondPart_ = getDefaultInstance().getSecondPart(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:AggregateResponse) + } + + static { + defaultInstance = new AggregateResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AggregateResponse) + } + + public static abstract class AggregateService + implements com.google.protobuf.Service { + protected AggregateService() {} + + public interface Interface { + public abstract void getMax( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getMin( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getSum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getRowNum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getAvg( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getStd( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getMedian( + com.google.protobuf.RpcController controller, + 
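/*
 * firstPart is a repeated bytes field: each region typically contributes one
 * partial result and the caller folds the list. Sketch; the UTF-8 payloads are
 * placeholders for whatever encoding the column interpreter actually uses:
 *
 *   AggregateProtos.AggregateResponse resp =
 *       AggregateProtos.AggregateResponse.newBuilder()
 *           .addFirstPart(com.google.protobuf.ByteString.copyFromUtf8("13"))
 *           .addFirstPart(com.google.protobuf.ByteString.copyFromUtf8("29"))
 *           .build();
 *   for (com.google.protobuf.ByteString part : resp.getFirstPartList()) {
 *     // decode each shard's partial aggregate, then combine
 *   }
 */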
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new AggregateService() { + @java.lang.Override + public void getMax( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getMax(controller, request, done); + } + + @java.lang.Override + public void getMin( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getMin(controller, request, done); + } + + @java.lang.Override + public void getSum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getSum(controller, request, done); + } + + @java.lang.Override + public void getRowNum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getRowNum(controller, request, done); + } + + @java.lang.Override + public void getAvg( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getAvg(controller, request, done); + } + + @java.lang.Override + public void getStd( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getStd(controller, request, done); + } + + @java.lang.Override + public void getMedian( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + impl.getMedian(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + case 1: + return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + case 2: + return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + case 3: + return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + case 4: + return impl.getAvg(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + case 5: + return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + case 6: + return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void getMax( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getMin( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getSum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + 
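/*
 * callBlockingMethod() above dispatches on MethodDescriptor.getIndex(), so a
 * caller selects a method through the service descriptor. Sketch; "GetMax" as
 * the method name in Aggregate.proto is an assumption (the Java stubs camel-case
 * whatever the proto declares), and impl, controller, and request are hypothetical:
 *
 *   com.google.protobuf.BlockingService svc =
 *       AggregateProtos.AggregateService.newReflectiveBlockingService(impl);
 *   com.google.protobuf.Descriptors.MethodDescriptor m =
 *       AggregateProtos.AggregateService.getDescriptor().findMethodByName("GetMax");
 *   com.google.protobuf.Message reply =
 *       svc.callBlockingMethod(m, controller, request);  // throws ServiceException on failure
 */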
com.google.protobuf.RpcCallback done); + + public abstract void getRowNum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getAvg( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getStd( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public abstract void getMedian( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getMax( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + } + + public void getMin( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + } + + public void getSum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + } + + public void getRowNum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + } + + public void getAvg( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + } + + public void getStd( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + } + + public void getMedian( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); + 
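
[Editorial note] The asynchronous Stub above simply forwards each method to channel.callMethod with the matching MethodDescriptor, letting RpcUtil.generalizeCallback adapt the typed callback. A minimal caller-side sketch, assuming an RpcChannel, an RpcController, and a populated AggregateArgument are obtained elsewhere; the class and method names below are illustrative and not part of this patch:

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcChannel;
import com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

// Illustrative helper, not part of the generated code.
public final class AggregateStubSketch {
  public static void sumAsync(RpcChannel channel, RpcController controller,
      AggregateArgument request) {
    AggregateService.Stub stub = AggregateService.newStub(channel);
    stub.getSum(controller, request, new RpcCallback<AggregateResponse>() {
      public void run(AggregateResponse response) {
        // A null response signals failure; details are reported through
        // the controller, per the protobuf service contract.
        if (response != null) {
          System.out.println("firstPart count: " + response.getFirstPartCount());
        }
      }
    });
  }
}

The BlockingStub defined further below is the same call expressed synchronously: AggregateService.newBlockingStub(blockingChannel).getSum(controller, request) returns the AggregateResponse directly or throws ServiceException.
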
} + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + + public 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AggregateArgument_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AggregateArgument_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AggregateResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AggregateResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] 
descriptorData = { + "\n\017Aggregate.proto\032\014Client.proto\"h\n\021Aggre" + + "gateArgument\022\034\n\024interpreterClassName\030\001 \002" + + "(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022 \n\030interpreterSp" + + "ecificBytes\030\003 \001(\014\":\n\021AggregateResponse\022\021" + + "\n\tfirstPart\030\001 \003(\014\022\022\n\nsecondPart\030\002 \001(\0142\366\002" + + "\n\020AggregateService\0220\n\006getMax\022\022.Aggregate" + + "Argument\032\022.AggregateResponse\0220\n\006getMin\022\022" + + ".AggregateArgument\032\022.AggregateResponse\0220" + + "\n\006getSum\022\022.AggregateArgument\032\022.Aggregate" + + "Response\0223\n\tgetRowNum\022\022.AggregateArgumen", + "t\032\022.AggregateResponse\0220\n\006getAvg\022\022.Aggreg" + + "ateArgument\032\022.AggregateResponse\0220\n\006getSt" + + "d\022\022.AggregateArgument\032\022.AggregateRespons" + + "e\0223\n\tgetMedian\022\022.AggregateArgument\032\022.Agg" + + "regateResponseBE\n*org.apache.hadoop.hbas" + + "e.protobuf.generatedB\017AggregateProtosH\001\210" + + "\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_AggregateArgument_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_AggregateArgument_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AggregateArgument_descriptor, + new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", }, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class); + internal_static_AggregateResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_AggregateResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AggregateResponse_descriptor, + new java.lang.String[] { "FirstPart", "SecondPart", }, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java new file mode 100644 index 0000000..05a43f6 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -0,0 +1,24104 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: Client.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ClientProtos { + private ClientProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ColumnOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // repeated bytes qualifier = 2; + java.util.List getQualifierList(); + int getQualifierCount(); + com.google.protobuf.ByteString getQualifier(int index); + } + public static final class Column extends + com.google.protobuf.GeneratedMessage + implements ColumnOrBuilder { + // Use Column.newBuilder() to construct. + private Column(Builder builder) { + super(builder); + } + private Column(boolean noInit) {} + + private static final Column defaultInstance; + public static Column getDefaultInstance() { + return defaultInstance; + } + + public Column getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // repeated bytes qualifier = 2; + public static final int QUALIFIER_FIELD_NUMBER = 2; + private java.util.List qualifier_; + public java.util.List + getQualifierList() { + return qualifier_; + } + public int getQualifierCount() { + return qualifier_.size(); + } + public com.google.protobuf.ByteString getQualifier(int index) { + return qualifier_.get(index); + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + for (int i = 0; i < qualifier_.size(); i++) { + output.writeBytes(2, qualifier_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + { + int dataSize = 0; + for (int i = 0; i < qualifier_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(qualifier_.get(i)); + } + size += dataSize; + size += 1 * getQualifierList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = 
size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && getQualifierList() + .equals(other.getQualifierList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (getQualifierCount() > 0) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifierList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = 
newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = java.util.Collections.unmodifiableList(qualifier_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (!other.qualifier_.isEmpty()) { + if (qualifier_.isEmpty()) { + qualifier_ = other.qualifier_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureQualifierIsMutable(); + qualifier_.addAll(other.qualifier_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + ensureQualifierIsMutable(); + qualifier_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // repeated bytes qualifier = 2; + private java.util.List qualifier_ = java.util.Collections.emptyList();; + private void ensureQualifierIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = new 
java.util.ArrayList(qualifier_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getQualifierList() { + return java.util.Collections.unmodifiableList(qualifier_); + } + public int getQualifierCount() { + return qualifier_.size(); + } + public com.google.protobuf.ByteString getQualifier(int index) { + return qualifier_.get(index); + } + public Builder setQualifier( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierIsMutable(); + qualifier_.set(index, value); + onChanged(); + return this; + } + public Builder addQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierIsMutable(); + qualifier_.add(value); + onChanged(); + return this; + } + public Builder addAllQualifier( + java.lang.Iterable values) { + ensureQualifierIsMutable(); + super.addAll(values, qualifier_); + onChanged(); + return this; + } + public Builder clearQualifier() { + qualifier_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Column) + } + + static { + defaultInstance = new Column(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Column) + } + + public interface GetOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // repeated .Column column = 2; + java.util.List + getColumnList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); + int getColumnCount(); + java.util.List + getColumnOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index); + + // repeated .NameBytesPair attribute = 3; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index); + + // optional uint64 lockId = 4; + boolean hasLockId(); + long getLockId(); + + // optional .Filter filter = 5; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); + + // optional .TimeRange timeRange = 6; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + + // optional uint32 maxVersions = 7 [default = 1]; + boolean hasMaxVersions(); + int getMaxVersions(); + + // optional bool cacheBlocks = 8 [default = true]; + boolean hasCacheBlocks(); + boolean getCacheBlocks(); + + // optional uint32 storeLimit = 9; + boolean hasStoreLimit(); + int getStoreLimit(); + + // optional uint32 storeOffset = 10; + boolean hasStoreOffset(); + int getStoreOffset(); + } + public static final class Get extends + com.google.protobuf.GeneratedMessage + implements GetOrBuilder { + // Use Get.newBuilder() to construct. 
+ private Get(Builder builder) { + super(builder); + } + private Get(boolean noInit) {} + + private static final Get defaultInstance; + public static Get getDefaultInstance() { + return defaultInstance; + } + + public Get getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // repeated .Column column = 2; + public static final int COLUMN_FIELD_NUMBER = 2; + private java.util.List column_; + public java.util.List getColumnList() { + return column_; + } + public java.util.List + getColumnOrBuilderList() { + return column_; + } + public int getColumnCount() { + return column_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + return column_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + return column_.get(index); + } + + // repeated .NameBytesPair attribute = 3; + public static final int ATTRIBUTE_FIELD_NUMBER = 3; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional uint64 lockId = 4; + public static final int LOCKID_FIELD_NUMBER = 4; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + + // optional .Filter filter = 5; + public static final int FILTER_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + return filter_; + } + + // optional .TimeRange timeRange = 6; + public static final int TIMERANGE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + // optional uint32 maxVersions = 7 [default = 
1]; + public static final int MAXVERSIONS_FIELD_NUMBER = 7; + private int maxVersions_; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getMaxVersions() { + return maxVersions_; + } + + // optional bool cacheBlocks = 8 [default = true]; + public static final int CACHEBLOCKS_FIELD_NUMBER = 8; + private boolean cacheBlocks_; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + + // optional uint32 storeLimit = 9; + public static final int STORELIMIT_FIELD_NUMBER = 9; + private int storeLimit_; + public boolean hasStoreLimit() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getStoreLimit() { + return storeLimit_; + } + + // optional uint32 storeOffset = 10; + public static final int STOREOFFSET_FIELD_NUMBER = 10; + private int storeOffset_; + public boolean hasStoreOffset() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public int getStoreOffset() { + return storeOffset_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + lockId_ = 0L; + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + maxVersions_ = 1; + cacheBlocks_ = true; + storeLimit_ = 0; + storeOffset_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + for (int i = 0; i < column_.size(); i++) { + output.writeMessage(2, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(3, attribute_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(4, lockId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt32(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt32(9, storeLimit_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeUInt32(10, storeOffset_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { 
+ size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + for (int i = 0; i < column_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, attribute_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, lockId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(9, storeLimit_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(10, storeOffset_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && getColumnList() + .equals(other.getColumnList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && (hasMaxVersions() == other.hasMaxVersions()); + if (hasMaxVersions()) { + result = result && (getMaxVersions() + == other.getMaxVersions()); + } + result = result && (hasCacheBlocks() == other.hasCacheBlocks()); + if (hasCacheBlocks()) { + result = result && (getCacheBlocks() + == other.getCacheBlocks()); + } + result = result && (hasStoreLimit() == other.hasStoreLimit()); + if (hasStoreLimit()) { + result = result && (getStoreLimit() + == other.getStoreLimit()); + } + result = result && (hasStoreOffset() == other.hasStoreOffset()); + if (hasStoreOffset()) { + result = result && (getStoreOffset() + == other.getStoreOffset()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + 
public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (getColumnCount() > 0) { + hash = (37 * hash) + COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumnList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + if (hasMaxVersions()) { + hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getMaxVersions(); + } + if (hasCacheBlocks()) { + hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCacheBlocks()); + } + if (hasStoreLimit()) { + hash = (37 * hash) + STORELIMIT_FIELD_NUMBER; + hash = (53 * hash) + getStoreLimit(); + } + if (hasStoreOffset()) { + hash = (37 * hash) + STOREOFFSET_FIELD_NUMBER; + hash = (53 * hash) + getStoreOffset(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFieldBuilder(); + getAttributeFieldBuilder(); + getFilterFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + columnBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + attributeBuilder_.clear(); + } + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + maxVersions_ = 1; + bitField0_ = (bitField0_ & ~0x00000040); + cacheBlocks_ = true; + bitField0_ = (bitField0_ & ~0x00000080); + storeLimit_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + storeOffset_ = 0; + bitField0_ = (bitField0_ & ~0x00000200); + 
return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (columnBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + column_ = java.util.Collections.unmodifiableList(column_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.column_ = column_; + } else { + result.column_ = columnBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000002; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.maxVersions_ = maxVersions_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.cacheBlocks_ = cacheBlocks_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000040; + } + result.storeLimit_ = storeLimit_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000080; + } + result.storeOffset_ = storeOffset_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (columnBuilder_ == null) { + if (!other.column_.isEmpty()) { + if (column_.isEmpty()) { + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureColumnIsMutable(); + column_.addAll(other.column_); + } + onChanged(); + } + } else { + if (!other.column_.isEmpty()) { + if (columnBuilder_.isEmpty()) { + columnBuilder_.dispose(); + columnBuilder_ = null; + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000002); + columnBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnFieldBuilder() : null; + } else { + columnBuilder_.addAllMessages(other.column_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000004); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + if (other.hasMaxVersions()) { + setMaxVersions(other.getMaxVersions()); + } + if (other.hasCacheBlocks()) { + setCacheBlocks(other.getCacheBlocks()); + } + if (other.hasStoreLimit()) { + setStoreLimit(other.getStoreLimit()); + } + if (other.hasStoreOffset()) { + setStoreOffset(other.getStoreOffset()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + 
addColumn(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 32: { + bitField0_ |= 0x00000008; + lockId_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + cacheBlocks_ = input.readBool(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + storeLimit_ = input.readUInt32(); + break; + } + case 80: { + bitField0_ |= 0x00000200; + storeOffset_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // repeated .Column column = 2; + private java.util.List column_ = + java.util.Collections.emptyList(); + private void ensureColumnIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + column_ = new java.util.ArrayList(column_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; + + public java.util.List getColumnList() { + if (columnBuilder_ == null) { + return java.util.Collections.unmodifiableList(column_); + } else { + return columnBuilder_.getMessageList(); + } + } + public int getColumnCount() { + if (columnBuilder_ == null) { + return column_.size(); + } else { + return columnBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + if (columnBuilder_ == null) { + return column_.get(index); + } else { + return columnBuilder_.getMessage(index); + } + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.set(index, value); + onChanged(); 
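+           // (editorial note) while no nested builders have been requested, the
+           // repeated field lives in a plain ArrayList, as above; once
+           // getColumnFieldBuilder() has been called, the else branch below
+           // delegates every mutation to the RepeatedFieldBuilder instead.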
+ } else { + columnBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.set(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(value); + onChanged(); + } else { + columnBuilder_.addMessage(value); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(index, value); + onChanged(); + } else { + columnBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumn( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumn( + java.lang.Iterable values) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + super.addAll(values, column_); + onChanged(); + } else { + columnBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumn() { + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + columnBuilder_.clear(); + } + return this; + } + public Builder removeColumn(int index) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.remove(index); + onChanged(); + } else { + columnBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( + int index) { + return getColumnFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + if (columnBuilder_ == null) { + return column_.get(index); } else { + return columnBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnOrBuilderList() { + if (columnBuilder_ != null) { + return columnBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(column_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { + return getColumnFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( + int index) { + return getColumnFieldBuilder().addBuilder( + index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public java.util.List + getColumnBuilderList() { + return getColumnFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> + getColumnFieldBuilder() { + if (columnBuilder_ == null) { + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( + column_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + column_ = null; + } + return columnBuilder_; + } + + // repeated .NameBytesPair attribute = 3; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + 
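+           // (editorial note) as with column, mutation is routed to the
+           // RepeatedFieldBuilder once one has been created.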
attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + attribute_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + 
isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional uint64 lockId = 4; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000008; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000008); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional .Filter filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // optional .TimeRange timeRange = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + private int maxVersions_ = 1; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getMaxVersions() { + return maxVersions_; + } + public Builder setMaxVersions(int value) { + bitField0_ |= 0x00000040; + maxVersions_ = value; + onChanged(); + return this; + } + public Builder clearMaxVersions() { + bitField0_ = (bitField0_ & ~0x00000040); + maxVersions_ = 1; + onChanged(); + return this; + } + + // optional bool cacheBlocks = 8 [default = true]; + private boolean cacheBlocks_ = true; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + public Builder setCacheBlocks(boolean value) { + bitField0_ |= 0x00000080; + cacheBlocks_ = value; + onChanged(); + return this; + } + public Builder clearCacheBlocks() { + bitField0_ = (bitField0_ & ~0x00000080); + cacheBlocks_ = true; + onChanged(); + return this; + } + + // optional uint32 storeLimit = 9; + private int storeLimit_ ; + public boolean hasStoreLimit() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public int getStoreLimit() { + return storeLimit_; + } + public Builder setStoreLimit(int value) { + bitField0_ |= 0x00000100; + storeLimit_ = value; + onChanged(); + return this; + } + public Builder clearStoreLimit() { + bitField0_ = (bitField0_ & ~0x00000100); + storeLimit_ = 0; + onChanged(); + return this; + } + + // optional uint32 storeOffset = 10; + private int storeOffset_ ; + public boolean hasStoreOffset() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + public int getStoreOffset() { + return storeOffset_; + } + public Builder setStoreOffset(int value) { + bitField0_ |= 0x00000200; + storeOffset_ = value; + onChanged(); + return this; + } + public Builder clearStoreOffset() { + bitField0_ = (bitField0_ & ~0x00000200); + storeOffset_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Get) + } + + static { + defaultInstance = new Get(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Get) + } + + public interface ResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes keyValueBytes = 1; + java.util.List getKeyValueBytesList(); + int getKeyValueBytesCount(); + com.google.protobuf.ByteString getKeyValueBytes(int index); + } + public static final class Result extends + com.google.protobuf.GeneratedMessage + implements ResultOrBuilder { + // Use Result.newBuilder() to construct. 
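+   // -------------------------------------------------------------------------
+   // Editorial sketch, not protoc output: typical client-side construction of
+   // the Get message completed above, via its generated Builder. The row key
+   // "row-1" is illustrative only.
+   //
+   //   ClientProtos.Get get = ClientProtos.Get.newBuilder()
+   //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1")) // required
+   //       .setMaxVersions(3)      // proto default is 1
+   //       .setCacheBlocks(false)  // proto default is true
+   //       .build();               // throws UninitializedMessageException if row is unset
+   // -------------------------------------------------------------------------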
+ private Result(Builder builder) { + super(builder); + } + private Result(boolean noInit) {} + + private static final Result defaultInstance; + public static Result getDefaultInstance() { + return defaultInstance; + } + + public Result getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; + } + + // repeated bytes keyValueBytes = 1; + public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; + private java.util.List keyValueBytes_; + public java.util.List + getKeyValueBytesList() { + return keyValueBytes_; + } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); + } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); + } + + private void initFields() { + keyValueBytes_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < keyValueBytes_.size(); i++) { + output.writeBytes(1, keyValueBytes_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < keyValueBytes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValueBytes_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueBytesList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj; + + boolean result = true; + result = result && getKeyValueBytesList() + .equals(other.getKeyValueBytesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getKeyValueBytesCount() > 0) { + hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; + hash = (53 * hash) + getKeyValueBytesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + 
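+       // (editorial note) every static parseFrom overload funnels through a
+       // Builder and buildParsed(), which reports an uninitialized message as a
+       // checked InvalidProtocolBufferException rather than the unchecked
+       // UninitializedMessageException thrown by build().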
return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.keyValueBytes_ = keyValueBytes_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; + if (!other.keyValueBytes_.isEmpty()) { + if (keyValueBytes_.isEmpty()) { + keyValueBytes_ = other.keyValueBytes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.addAll(other.keyValueBytes_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes keyValueBytes = 1; + private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; + private void ensureKeyValueBytesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getKeyValueBytesList() { + return java.util.Collections.unmodifiableList(keyValueBytes_); + } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); + } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); + } + public Builder setKeyValueBytes( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.set(index, value); + onChanged(); + return this; + } + public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(value); + onChanged(); + return this; + } + public Builder addAllKeyValueBytes( + java.lang.Iterable values) { + ensureKeyValueBytesIsMutable(); + super.addAll(values, keyValueBytes_); + onChanged(); + return this; + } + public Builder clearKeyValueBytes() { + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Result) + } + + static { + defaultInstance = new Result(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Result) + } + + public interface GetRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Get get = 2; + boolean hasGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); + + // optional bool closestRowBefore = 3; + boolean hasClosestRowBefore(); + boolean getClosestRowBefore(); + + // optional bool existenceOnly = 4; + boolean hasExistenceOnly(); + boolean getExistenceOnly(); + } + public static final class GetRequest extends + com.google.protobuf.GeneratedMessage + implements GetRequestOrBuilder { + // Use GetRequest.newBuilder() to construct. 
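+   // -------------------------------------------------------------------------
+   // Editorial sketch, not protoc output: wrapping a Get in the GetRequest
+   // defined here, and round-tripping a Result. `region` stands for an
+   // already-built HBaseProtos.RegionSpecifier (its builder lies outside this
+   // section) and `bytes` for a serialized Result; both are assumptions of the
+   // sketch.
+   //
+   //   ClientProtos.GetRequest req = ClientProtos.GetRequest.newBuilder()
+   //       .setRegion(region)        // required
+   //       .setGet(get)              // required
+   //       .setExistenceOnly(true)   // only ask whether the row exists
+   //       .build();
+   //
+   //   ClientProtos.Result r = ClientProtos.Result.parseFrom(bytes);
+   //   int cells = r.getKeyValueBytesCount();
+   // -------------------------------------------------------------------------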
+ private GetRequest(Builder builder) { + super(builder); + } + private GetRequest(boolean noInit) {} + + private static final GetRequest defaultInstance; + public static GetRequest getDefaultInstance() { + return defaultInstance; + } + + public GetRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Get get = 2; + public static final int GET_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { + return get_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { + return get_; + } + + // optional bool closestRowBefore = 3; + public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; + private boolean closestRowBefore_; + public boolean hasClosestRowBefore() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getClosestRowBefore() { + return closestRowBefore_; + } + + // optional bool existenceOnly = 4; + public static final int EXISTENCEONLY_FIELD_NUMBER = 4; + private boolean existenceOnly_; + public boolean hasExistenceOnly() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getExistenceOnly() { + return existenceOnly_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + closestRowBefore_ = false; + existenceOnly_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasGet()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getGet().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, 
closestRowBefore_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(4, existenceOnly_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, closestRowBefore_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, existenceOnly_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasGet() == other.hasGet()); + if (hasGet()) { + result = result && getGet() + .equals(other.getGet()); + } + result = result && (hasClosestRowBefore() == other.hasClosestRowBefore()); + if (hasClosestRowBefore()) { + result = result && (getClosestRowBefore() + == other.getClosestRowBefore()); + } + result = result && (hasExistenceOnly() == other.hasExistenceOnly()); + if (hasExistenceOnly()) { + result = result && (getExistenceOnly() + == other.getExistenceOnly()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasGet()) { + hash = (37 * hash) + GET_FIELD_NUMBER; + hash = (53 * hash) + getGet().hashCode(); + } + if (hasClosestRowBefore()) { + hash = (37 * hash) + CLOSESTROWBEFORE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getClosestRowBefore()); + } + if (hasExistenceOnly()) { + hash = (37 * hash) + EXISTENCEONLY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExistenceOnly()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; + } + + // 
Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getGetFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + closestRowBefore_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + existenceOnly_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (getBuilder_ == null) { + result.get_ = get_; + } else { + result.get_ = getBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.closestRowBefore_ = closestRowBefore_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.existenceOnly_ = existenceOnly_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasGet()) { + mergeGet(other.getGet()); + } + if (other.hasClosestRowBefore()) { + setClosestRowBefore(other.getClosestRowBefore()); + } + if (other.hasExistenceOnly()) { + setExistenceOnly(other.getExistenceOnly()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasGet()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getGet().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); + if (hasGet()) { + subBuilder.mergeFrom(getGet()); + } + input.readMessage(subBuilder, extensionRegistry); + setGet(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + closestRowBefore_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + existenceOnly_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new 
NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Get get = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { + if (getBuilder_ == null) { + return get_; + } else { + return getBuilder_.getMessage(); + } + } + public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { + if (getBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + get_ = value; + onChanged(); + } else { + getBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return 
this; + } + public Builder setGet( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { + if (getBuilder_ == null) { + get_ = builderForValue.build(); + onChanged(); + } else { + getBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { + if (getBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { + get_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); + } else { + get_ = value; + } + onChanged(); + } else { + getBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearGet() { + if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + onChanged(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getGetFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { + if (getBuilder_ != null) { + return getBuilder_.getMessageOrBuilder(); + } else { + return get_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> + getGetFieldBuilder() { + if (getBuilder_ == null) { + getBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( + get_, + getParentForChildren(), + isClean()); + get_ = null; + } + return getBuilder_; + } + + // optional bool closestRowBefore = 3; + private boolean closestRowBefore_ ; + public boolean hasClosestRowBefore() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getClosestRowBefore() { + return closestRowBefore_; + } + public Builder setClosestRowBefore(boolean value) { + bitField0_ |= 0x00000004; + closestRowBefore_ = value; + onChanged(); + return this; + } + public Builder clearClosestRowBefore() { + bitField0_ = (bitField0_ & ~0x00000004); + closestRowBefore_ = false; + onChanged(); + return this; + } + + // optional bool existenceOnly = 4; + private boolean existenceOnly_ ; + public boolean hasExistenceOnly() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getExistenceOnly() { + return existenceOnly_; + } + public Builder setExistenceOnly(boolean value) { + bitField0_ |= 0x00000008; + existenceOnly_ = value; + onChanged(); + return this; + } + public Builder clearExistenceOnly() { + bitField0_ = (bitField0_ & ~0x00000008); + existenceOnly_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetRequest) + } + + static { + defaultInstance = new GetRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRequest) + } + + public interface GetResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result 
result = 1; + boolean hasResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional bool exists = 2; + boolean hasExists(); + boolean getExists(); + } + public static final class GetResponse extends + com.google.protobuf.GeneratedMessage + implements GetResponseOrBuilder { + // Use GetResponse.newBuilder() to construct. + private GetResponse(Builder builder) { + super(builder); + } + private GetResponse(boolean noInit) {} + + private static final GetResponse defaultInstance; + public static GetResponse getDefaultInstance() { + return defaultInstance; + } + + public GetResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + return result_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional bool exists = 2; + public static final int EXISTS_FIELD_NUMBER = 2; + private boolean exists_; + public boolean hasExists() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getExists() { + return exists_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + exists_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, exists_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, exists_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if 
(!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasExists() == other.hasExists()); + if (hasExists()) { + result = result && (getExists() + == other.getExists()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasExists()) { + hash = (37 * hash) + EXISTS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExists()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + 
com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + exists_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } 
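+ // buildParsed() is what the static parseFrom() overloads above call into:
+ // unlike build(), it reports missing required fields as an
+ // InvalidProtocolBufferException rather than an UninitializedMessageException,
+ // the appropriate failure mode for bytes read off the wire.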
+ return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.exists_ = exists_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasExists()) { + setExists(other.getExists()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); + if (hasResult()) { + subBuilder.mergeFrom(getResult()); + } + input.readMessage(subBuilder, extensionRegistry); + setResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + exists_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .Result result = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } else { + return resultBuilder_.getMessage(); + } + } + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value 
== null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional bool exists = 2; + private boolean exists_ ; + public boolean hasExists() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getExists() { + return exists_; + } + public Builder setExists(boolean value) { + bitField0_ |= 0x00000002; + exists_ = value; + onChanged(); + return this; + } + public Builder clearExists() { + bitField0_ = (bitField0_ & ~0x00000002); + exists_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetResponse) + } + + static { + defaultInstance = new GetResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetResponse) + } + + public interface ConditionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required bytes family = 2; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required bytes qualifier = 3; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // required .CompareType 
compareType = 4; + boolean hasCompareType(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType(); + + // required .Comparator comparator = 5; + boolean hasComparator(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); + } + public static final class Condition extends + com.google.protobuf.GeneratedMessage + implements ConditionOrBuilder { + // Use Condition.newBuilder() to construct. + private Condition(Builder builder) { + super(builder); + } + private Condition(boolean noInit) {} + + private static final Condition defaultInstance; + public static Condition getDefaultInstance() { + return defaultInstance; + } + + public Condition getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required bytes family = 2; + public static final int FAMILY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required bytes qualifier = 3; + public static final int QUALIFIER_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // required .CompareType compareType = 4; + public static final int COMPARETYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_; + public boolean hasCompareType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { + return compareType_; + } + + // required .Comparator comparator = 5; + public static final int COMPARATOR_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { + return comparator_; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { + return comparator_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 
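+ // The defaults above mirror the .proto declarations: empty byte strings for
+ // the bytes fields, the first-listed enum value (LESS) for compareType, and
+ // the default instance for the comparator message field.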
+ } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCompareType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasComparator()) { + memoizedIsInitialized = 0; + return false; + } + if (!getComparator().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, compareType_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, comparator_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, compareType_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, comparator_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasCompareType() == other.hasCompareType()); + if (hasCompareType()) { + result = result && + (getCompareType() == other.getCompareType()); + } + result = result && (hasComparator() == 
other.hasComparator()); + if (hasComparator()) { + result = result && getComparator() + .equals(other.getComparator()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasCompareType()) { + hash = (37 * hash) + COMPARETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getCompareType()); + } + if (hasComparator()) { + hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; + hash = (53 * hash) + getComparator().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparatorFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + bitField0_ = (bitField0_ & ~0x00000008); + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.compareType_ = compareType_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (comparatorBuilder_ == null) { + result.comparator_ = comparator_; + } else { + result.comparator_ = comparatorBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasCompareType()) { + setCompareType(other.getCompareType()); + } + if (other.hasComparator()) { + mergeComparator(other.getComparator()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasFamily()) { + + return false; + } + if (!hasQualifier()) { + + return false; + } + if (!hasCompareType()) { + + return false; + } + if (!hasComparator()) { + + return false; + } + if (!getComparator().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + 
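+ // Case labels are protobuf tags: (field number << 3) | wire type, so
+ // 26 is field 3 (qualifier, length-delimited), 32 is field 4 (compareType,
+ // varint) and 42 is field 5 (comparator, an embedded message).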
qualifier_ = input.readBytes(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + compareType_ = value; + } + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); + if (hasComparator()) { + subBuilder.mergeFrom(getComparator()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparator(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required bytes family = 2; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000002); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required bytes qualifier = 3; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000004); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // required .CompareType compareType = 4; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + public boolean hasCompareType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { + return compareType_; + } + public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compareType_ = value; + onChanged(); + return this; + } + public Builder clearCompareType() { + bitField0_ = (bitField0_ & ~0x00000008); + compareType_ 
= org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + onChanged(); + return this; + } + + // required .Comparator comparator = 5; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { + if (comparatorBuilder_ == null) { + return comparator_; + } else { + return comparatorBuilder_.getMessage(); + } + } + public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { + if (comparatorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparator_ = value; + onChanged(); + } else { + comparatorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setComparator( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { + if (comparatorBuilder_ == null) { + comparator_ = builderForValue.build(); + onChanged(); + } else { + comparatorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { + if (comparatorBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { + comparator_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); + } else { + comparator_ = value; + } + onChanged(); + } else { + comparatorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearComparator() { + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + onChanged(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getComparatorFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { + if (comparatorBuilder_ != null) { + return comparatorBuilder_.getMessageOrBuilder(); + } else { + return comparator_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> + getComparatorFieldBuilder() { + if (comparatorBuilder_ == null) { + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( + comparator_, + getParentForChildren(), + isClean()); + comparator_ = null; + } + return comparatorBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Condition) + } + + static { + defaultInstance = new Condition(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Condition) + } + + public interface MutateOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required .Mutate.MutateType mutateType = 2; + boolean hasMutateType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType(); + + // repeated .Mutate.ColumnValue columnValue = 3; + java.util.List + getColumnValueList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index); + int getColumnValueCount(); + java.util.List + getColumnValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index); + + // repeated .NameBytesPair attribute = 4; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index); + + // optional uint64 timestamp = 5; + boolean hasTimestamp(); + long getTimestamp(); + + // optional uint64 lockId = 6; + boolean hasLockId(); + long getLockId(); + + // optional bool writeToWAL = 7 [default = true]; + boolean hasWriteToWAL(); + boolean getWriteToWAL(); + + // optional .TimeRange timeRange = 10; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + } + public static final class Mutate extends + com.google.protobuf.GeneratedMessage + implements MutateOrBuilder { + // Use Mutate.newBuilder() to construct. 
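+ // A minimal, illustrative construction (field values are examples only):
+ //   Mutate put = Mutate.newBuilder()
+ //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
+ //       .setMutateType(Mutate.MutateType.PUT)
+ //       .build();
+ // build() throws if either required field (row, mutateType) is unset.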
+ private Mutate(Builder builder) { + super(builder); + } + private Mutate(boolean noInit) {} + + private static final Mutate defaultInstance; + public static Mutate getDefaultInstance() { + return defaultInstance; + } + + public Mutate getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; + } + + public enum MutateType + implements com.google.protobuf.ProtocolMessageEnum { + APPEND(0, 0), + INCREMENT(1, 1), + PUT(2, 2), + DELETE(3, 3), + ; + + public static final int APPEND_VALUE = 0; + public static final int INCREMENT_VALUE = 1; + public static final int PUT_VALUE = 2; + public static final int DELETE_VALUE = 3; + + + public final int getNumber() { return value; } + + public static MutateType valueOf(int value) { + switch (value) { + case 0: return APPEND; + case 1: return INCREMENT; + case 2: return PUT; + case 3: return DELETE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public MutateType findValueByNumber(int number) { + return MutateType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); + } + + private static final MutateType[] VALUES = { + APPEND, INCREMENT, PUT, DELETE, + }; + + public static MutateType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private MutateType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Mutate.MutateType) + } + + public enum DeleteType + implements com.google.protobuf.ProtocolMessageEnum { + DELETE_ONE_VERSION(0, 0), + DELETE_MULTIPLE_VERSIONS(1, 1), + DELETE_FAMILY(2, 2), + ; + + public static final int DELETE_ONE_VERSION_VALUE = 0; + public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; + public static final int DELETE_FAMILY_VALUE = 2; + + + public final int getNumber() { return value; } + + public static DeleteType valueOf(int value) { + switch (value) { + case 0: return DELETE_ONE_VERSION; + case 1: return DELETE_MULTIPLE_VERSIONS; + case 2: return DELETE_FAMILY; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public 
DeleteType findValueByNumber(int number) { + return DeleteType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(1); + } + + private static final DeleteType[] VALUES = { + DELETE_ONE_VERSION, DELETE_MULTIPLE_VERSIONS, DELETE_FAMILY, + }; + + public static DeleteType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private DeleteType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Mutate.DeleteType) + } + + public interface ColumnValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + java.util.List + getQualifierValueList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); + int getQualifierValueCount(); + java.util.List + getQualifierValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index); + } + public static final class ColumnValue extends + com.google.protobuf.GeneratedMessage + implements ColumnValueOrBuilder { + // Use ColumnValue.newBuilder() to construct. + private ColumnValue(Builder builder) { + super(builder); + } + private ColumnValue(boolean noInit) {} + + private static final ColumnValue defaultInstance; + public static ColumnValue getDefaultInstance() { + return defaultInstance; + } + + public ColumnValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + } + + public interface QualifierValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes qualifier = 1; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + + // optional uint64 timestamp = 3; + boolean hasTimestamp(); + long getTimestamp(); + + // optional .Mutate.DeleteType deleteType = 4; + boolean hasDeleteType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType(); + } + public static final class QualifierValue extends + com.google.protobuf.GeneratedMessage + implements QualifierValueOrBuilder { + // Use QualifierValue.newBuilder() to construct. 
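+ // Illustrative use (all four fields are optional, so build() cannot fail
+ // on missing required fields):
+ //   QualifierValue qv = QualifierValue.newBuilder()
+ //       .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
+ //       .setValue(com.google.protobuf.ByteString.copyFromUtf8("v"))
+ //       .build();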
+ private QualifierValue(Builder builder) { + super(builder); + } + private QualifierValue(boolean noInit) {} + + private static final QualifierValue defaultInstance; + public static QualifierValue getDefaultInstance() { + return defaultInstance; + } + + public QualifierValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes qualifier = 1; + public static final int QUALIFIER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + // optional uint64 timestamp = 3; + public static final int TIMESTAMP_FIELD_NUMBER = 3; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + + // optional .Mutate.DeleteType deleteType = 4; + public static final int DELETETYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_; + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { + return deleteType_; + } + + private void initFields() { + qualifier_ = com.google.protobuf.ByteString.EMPTY; + value_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, qualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, deleteType_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, qualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, deleteType_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) obj; + + boolean result = true; + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasDeleteType() == other.hasDeleteType()); + if (hasDeleteType()) { + result = result && + (getDeleteType() == other.getDeleteType()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasDeleteType()) { + hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getDeleteType()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue 
parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private 
Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.deleteType_ = deleteType_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasDeleteType()) { + setDeleteType(other.getDeleteType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + qualifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + deleteType_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // optional bytes qualifier = 1; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // optional uint64 timestamp = 3; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000004; + timestamp_ = value; + onChanged(); + return this; + } + public Builder 
clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + onChanged(); + return this; + } + + // optional .Mutate.DeleteType deleteType = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { + return deleteType_; + } + public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + deleteType_ = value; + onChanged(); + return this; + } + public Builder clearDeleteType() { + bitField0_ = (bitField0_ & ~0x00000008); + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue.QualifierValue) + } + + static { + defaultInstance = new QualifierValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate.ColumnValue.QualifierValue) + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; + private java.util.List qualifierValue_; + public java.util.List getQualifierValueList() { + return qualifierValue_; + } + public java.util.List + getQualifierValueOrBuilderList() { + return qualifierValue_; + } + public int getQualifierValueCount() { + return qualifierValue_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + return qualifierValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index) { + return qualifierValue_.get(index); + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifierValue_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + for (int i = 0; i < qualifierValue_.size(); i++) { + output.writeMessage(2, qualifierValue_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + for (int i = 0; 
i < qualifierValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, qualifierValue_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && getQualifierValueList() + .equals(other.getQualifierValueList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (getQualifierValueCount() > 0) { + hash = (37 * hash) + QUALIFIERVALUE_FIELD_NUMBER; + hash = (53 * hash) + getQualifierValueList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = 
newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getQualifierValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (qualifierValueBuilder_ == null) { + qualifierValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + qualifierValueBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (qualifierValueBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.qualifierValue_ = qualifierValue_; + } else { + result.qualifierValue_ = qualifierValueBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (qualifierValueBuilder_ == null) { + if (!other.qualifierValue_.isEmpty()) { + if (qualifierValue_.isEmpty()) { + qualifierValue_ = other.qualifierValue_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureQualifierValueIsMutable(); + qualifierValue_.addAll(other.qualifierValue_); + } + onChanged(); + } + } else { + if (!other.qualifierValue_.isEmpty()) { + if (qualifierValueBuilder_.isEmpty()) { + qualifierValueBuilder_.dispose(); + qualifierValueBuilder_ = null; + qualifierValue_ = other.qualifierValue_; + bitField0_ = (bitField0_ & ~0x00000002); + qualifierValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getQualifierValueFieldBuilder() : null; + } else { + qualifierValueBuilder_.addAllMessages(other.qualifierValue_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addQualifierValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + private java.util.List qualifierValue_ = + java.util.Collections.emptyList(); + private void ensureQualifierValueIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = new java.util.ArrayList(qualifierValue_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; + + public java.util.List getQualifierValueList() { + if (qualifierValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(qualifierValue_); + } else { + return qualifierValueBuilder_.getMessageList(); + } + } + public int getQualifierValueCount() { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.size(); + } else { + return qualifierValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.get(index); + } else { + return qualifierValueBuilder_.getMessage(index); + } + } + public Builder setQualifierValue( + int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.set(index, value); + onChanged(); + } else { + qualifierValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.set(index, builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.add(value); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(value); + } + return this; + } + public Builder addQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.add(index, value); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addQualifierValue( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.add(builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.add(index, builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllQualifierValue( + java.lang.Iterable values) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + super.addAll(values, qualifierValue_); + onChanged(); + } else { + qualifierValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearQualifierValue() { + if (qualifierValueBuilder_ == null) { + qualifierValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + qualifierValueBuilder_.clear(); + } + return this; + } + public Builder removeQualifierValue(int index) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.remove(index); + onChanged(); + } else { + qualifierValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( + int index) { + return getQualifierValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + 
int index) { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.get(index); } else { + return qualifierValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getQualifierValueOrBuilderList() { + if (qualifierValueBuilder_ != null) { + return qualifierValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(qualifierValue_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { + return getQualifierValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( + int index) { + return getQualifierValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + } + public java.util.List + getQualifierValueBuilderList() { + return getQualifierValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> + getQualifierValueFieldBuilder() { + if (qualifierValueBuilder_ == null) { + qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( + qualifierValue_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + qualifierValue_ = null; + } + return qualifierValueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue) + } + + static { + defaultInstance = new ColumnValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate.ColumnValue) + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required .Mutate.MutateType mutateType = 2; + public static final int MUTATETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_; + public boolean hasMutateType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { + return mutateType_; + } + + // repeated .Mutate.ColumnValue columnValue = 3; + public static final int COLUMNVALUE_FIELD_NUMBER = 3; + private java.util.List columnValue_; + public java.util.List getColumnValueList() { + return columnValue_; + } + public java.util.List + getColumnValueOrBuilderList() { + return columnValue_; + } + public int getColumnValueCount() { + return columnValue_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue 
getColumnValue(int index) { + return columnValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index) { + return columnValue_.get(index); + } + + // repeated .NameBytesPair attribute = 4; + public static final int ATTRIBUTE_FIELD_NUMBER = 4; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional uint64 timestamp = 5; + public static final int TIMESTAMP_FIELD_NUMBER = 5; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + + // optional uint64 lockId = 6; + public static final int LOCKID_FIELD_NUMBER = 6; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getLockId() { + return lockId_; + } + + // optional bool writeToWAL = 7 [default = true]; + public static final int WRITETOWAL_FIELD_NUMBER = 7; + private boolean writeToWAL_; + public boolean hasWriteToWAL() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getWriteToWAL() { + return writeToWAL_; + } + + // optional .TimeRange timeRange = 10; + public static final int TIMERANGE_FIELD_NUMBER = 10; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + columnValue_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + timestamp_ = 0L; + lockId_ = 0L; + writeToWAL_ = true; + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMutateType()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColumnValueCount(); i++) { + if (!getColumnValue(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if 
(((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, mutateType_.getNumber()); + } + for (int i = 0; i < columnValue_.size(); i++) { + output.writeMessage(3, columnValue_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(4, attribute_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(5, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(6, lockId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(7, writeToWAL_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeMessage(10, timeRange_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, mutateType_.getNumber()); + } + for (int i = 0; i < columnValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, columnValue_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, attribute_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(5, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(6, lockId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, writeToWAL_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, timeRange_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasMutateType() == other.hasMutateType()); + if (hasMutateType()) { + result = result && + (getMutateType() == other.getMutateType()); + } + result = result && getColumnValueList() + .equals(other.getColumnValueList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasWriteToWAL() == other.hasWriteToWAL()); + if (hasWriteToWAL()) { + result 
= result && (getWriteToWAL() + == other.getWriteToWAL()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasMutateType()) { + hash = (37 * hash) + MUTATETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getMutateType()); + } + if (getColumnValueCount() > 0) { + hash = (37 * hash) + COLUMNVALUE_FIELD_NUMBER; + hash = (53 * hash) + getColumnValueList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasWriteToWAL()) { + hash = (37 * hash) + WRITETOWAL_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getWriteToWAL()); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnValueFieldBuilder(); + getAttributeFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + bitField0_ = (bitField0_ & ~0x00000002); + if (columnValueBuilder_ == null) { + columnValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + columnValueBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + attributeBuilder_.clear(); + } + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000010); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000020); + writeToWAL_ = true; + bitField0_ = (bitField0_ & ~0x00000040); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + + public 
Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mutateType_ = mutateType_; + if (columnValueBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = java.util.Collections.unmodifiableList(columnValue_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.columnValue_ = columnValue_; + } else { + result.columnValue_ = columnValueBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.writeToWAL_ = writeToWAL_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasMutateType()) { + 
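+ // Note (editorial): generated merge semantics — singular fields present on
+ // `other` overwrite this builder's values, while the repeated columnValue and
+ // attribute fields below are concatenated onto the existing lists.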
setMutateType(other.getMutateType()); + } + if (columnValueBuilder_ == null) { + if (!other.columnValue_.isEmpty()) { + if (columnValue_.isEmpty()) { + columnValue_ = other.columnValue_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureColumnValueIsMutable(); + columnValue_.addAll(other.columnValue_); + } + onChanged(); + } + } else { + if (!other.columnValue_.isEmpty()) { + if (columnValueBuilder_.isEmpty()) { + columnValueBuilder_.dispose(); + columnValueBuilder_ = null; + columnValue_ = other.columnValue_; + bitField0_ = (bitField0_ & ~0x00000004); + columnValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnValueFieldBuilder() : null; + } else { + columnValueBuilder_.addAllMessages(other.columnValue_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000008); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasWriteToWAL()) { + setWriteToWAL(other.getWriteToWAL()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasMutateType()) { + + return false; + } + for (int i = 0; i < getColumnValueCount(); i++) { + if (!getColumnValue(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + mutateType_ = value; + } + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder(); + 
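+ // Note (editorial): each case key is a precomputed protobuf wire tag,
+ // (field_number << 3) | wire_type. Here 26 = (3 << 3) | 2, i.e. field 3
+ // (columnValue) as a length-delimited message; 10, 16, 34, 40, 48, 56 and 82
+ // likewise select fields 1, 2, 4, 5, 6, 7 and 10.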
input.readMessage(subBuilder, extensionRegistry); + addColumnValue(subBuilder.buildPartial()); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 40: { + bitField0_ |= 0x00000010; + timestamp_ = input.readUInt64(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + lockId_ = input.readUInt64(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + writeToWAL_ = input.readBool(); + break; + } + case 82: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required .Mutate.MutateType mutateType = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + public boolean hasMutateType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { + return mutateType_; + } + public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + mutateType_ = value; + onChanged(); + return this; + } + public Builder clearMutateType() { + bitField0_ = (bitField0_ & ~0x00000002); + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + onChanged(); + return this; + } + + // repeated .Mutate.ColumnValue columnValue = 3; + private java.util.List columnValue_ = + java.util.Collections.emptyList(); + private void ensureColumnValueIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = new java.util.ArrayList(columnValue_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; + + public java.util.List getColumnValueList() { + if (columnValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(columnValue_); + } else { + return columnValueBuilder_.getMessageList(); + } + } + public int getColumnValueCount() { + if (columnValueBuilder_ == null) { + return 
columnValue_.size(); + } else { + return columnValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index) { + if (columnValueBuilder_ == null) { + return columnValue_.get(index); + } else { + return columnValueBuilder_.getMessage(index); + } + } + public Builder setColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.set(index, value); + onChanged(); + } else { + columnValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.set(index, builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.add(value); + onChanged(); + } else { + columnValueBuilder_.addMessage(value); + } + return this; + } + public Builder addColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.add(index, value); + onChanged(); + } else { + columnValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumnValue( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.add(builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.add(index, builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumnValue( + java.lang.Iterable values) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + super.addAll(values, columnValue_); + onChanged(); + } else { + columnValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumnValue() { + if (columnValueBuilder_ == null) { + columnValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + columnValueBuilder_.clear(); + } + return this; + } + public Builder removeColumnValue(int index) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.remove(index); + onChanged(); + } else { + columnValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( + int index) { + return getColumnValueFieldBuilder().getBuilder(index); + } + 
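/* Editor's sketch, not compiler output: the columnValue accessors above follow the
 * standard protobuf repeated-message pattern -- a plain java.util.List until nested
 * builders are requested, then a RepeatedFieldBuilder that keeps parent and children in
 * sync. A minimal, hypothetical usage sketch (the ColumnValue builder `cv` is assumed to
 * be populated elsewhere):
 *
 *   Mutate.Builder m = ClientProtos.Mutate.newBuilder()
 *       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))  // required field 1
 *       .setMutateType(Mutate.MutateType.APPEND)                       // required field 2
 *       .addColumnValue(cv);                                           // repeated field 3
 *   assert m.getColumnValueCount() == 1;
 */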
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index) { + if (columnValueBuilder_ == null) { + return columnValue_.get(index); } else { + return columnValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnValueOrBuilderList() { + if (columnValueBuilder_ != null) { + return columnValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(columnValue_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { + return getColumnValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( + int index) { + return getColumnValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); + } + public java.util.List + getColumnValueBuilderList() { + return getColumnValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> + getColumnValueFieldBuilder() { + if (columnValueBuilder_ == null) { + columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder>( + columnValue_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + columnValue_ = null; + } + return columnValueBuilder_; + } + + // repeated .NameBytesPair attribute = 4; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + 
attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + attribute_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional uint64 timestamp = 5; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000010; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000010); + timestamp_ = 0L; + onChanged(); + return this; + } + + // optional uint64 lockId = 6; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000020; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000020); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional bool writeToWAL = 7 [default = true]; + private boolean writeToWAL_ = true; + public boolean hasWriteToWAL() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public boolean getWriteToWAL() { + return writeToWAL_; + } + public Builder setWriteToWAL(boolean value) { + bitField0_ |= 0x00000040; + writeToWAL_ = value; + onChanged(); + return this; + } + public Builder clearWriteToWAL() { + bitField0_ = (bitField0_ & ~0x00000040); + writeToWAL_ = true; + onChanged(); + return this; + } + + // optional .TimeRange timeRange = 10; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if 
(value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000080) == 0x00000080) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000080; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Mutate) + } + + static { + defaultInstance = new Mutate(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate) + } + + public interface MutateRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Mutate mutate = 2; + boolean hasMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder(); + + // optional .Condition condition = 3; + boolean hasCondition(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition(); + 
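/* Editor's sketch, not compiler output: a MutateRequest pairs a required RegionSpecifier
 * with a required Mutate, plus this optional Condition (presumably for checkAndMutate-style
 * calls). A hedged construction sketch, assuming prebuilt `region` and `mutate` messages:
 *
 *   MutateRequest req = MutateRequest.newBuilder()
 *       .setRegion(region)   // required field 1
 *       .setMutate(mutate)   // required field 2
 *       .build();            // build() throws if a required field is missing
 */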
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); + } + public static final class MutateRequest extends + com.google.protobuf.GeneratedMessage + implements MutateRequestOrBuilder { + // Use MutateRequest.newBuilder() to construct. + private MutateRequest(Builder builder) { + super(builder); + } + private MutateRequest(boolean noInit) {} + + private static final MutateRequest defaultInstance; + public static MutateRequest getDefaultInstance() { + return defaultInstance; + } + + public MutateRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Mutate mutate = 2; + public static final int MUTATE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { + return mutate_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { + return mutate_; + } + + // optional .Condition condition = 3; + public static final int CONDITION_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_; + public boolean hasCondition() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { + return condition_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { + return condition_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMutate()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getMutate().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasCondition()) { + if (!getCondition().isInitialized()) { + memoizedIsInitialized = 0; + return false; 
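// Editor's note, not compiler output: memoizedIsInitialized caches the result of this
// check: -1 = not yet computed, 0 = a required or nested field is missing, 1 = verified
// as fully initialized.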
+ } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, mutate_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(3, condition_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, mutate_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, condition_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasMutate() == other.hasMutate()); + if (hasMutate()) { + result = result && getMutate() + .equals(other.getMutate()); + } + result = result && (hasCondition() == other.hasCondition()); + if (hasCondition()) { + result = result && getCondition() + .equals(other.getCondition()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasMutate()) { + hash = (37 * hash) + MUTATE_FIELD_NUMBER; + hash = (53 * hash) + getMutate().hashCode(); + } + if (hasCondition()) { + hash = (37 * hash) + CONDITION_FIELD_NUMBER; + hash = (53 * hash) + getCondition().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder() + private 
Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getMutateFieldBuilder(); + getConditionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + if (conditionBuilder_ == null) { + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + } else { + conditionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (mutateBuilder_ == null) { + result.mutate_ = mutate_; + } else { + result.mutate_ = mutateBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + if (conditionBuilder_ == null) { + result.condition_ = condition_; + } else { + result.condition_ = conditionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasMutate()) { + mergeMutate(other.getMutate()); + } + if (other.hasCondition()) { + mergeCondition(other.getCondition()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasMutate()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getMutate().isInitialized()) { + + return false; + } + if (hasCondition()) { + if (!getCondition().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); + if (hasMutate()) { + subBuilder.mergeFrom(getMutate()); + } + input.readMessage(subBuilder, extensionRegistry); + setMutate(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(); + if (hasCondition()) { + subBuilder.mergeFrom(getCondition()); + } + input.readMessage(subBuilder, extensionRegistry); + setCondition(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return 
regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Mutate mutate = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutateBuilder_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { + if (mutateBuilder_ == null) { + return mutate_; + } else { + return mutateBuilder_.getMessage(); + } + } + public Builder 
setMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + mutate_ = value; + onChanged(); + } else { + mutateBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setMutate( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { + if (mutateBuilder_ == null) { + mutate_ = builderForValue.build(); + onChanged(); + } else { + mutateBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + mutate_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) { + mutate_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); + } else { + mutate_ = value; + } + onChanged(); + } else { + mutateBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearMutate() { + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + onChanged(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutateBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getMutateFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { + if (mutateBuilder_ != null) { + return mutateBuilder_.getMessageOrBuilder(); + } else { + return mutate_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> + getMutateFieldBuilder() { + if (mutateBuilder_ == null) { + mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( + mutate_, + getParentForChildren(), + isClean()); + mutate_ = null; + } + return mutateBuilder_; + } + + // optional .Condition condition = 3; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; + public boolean hasCondition() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { + if (conditionBuilder_ == null) { + return condition_; + } else { + return conditionBuilder_.getMessage(); + } + } + public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { + if (conditionBuilder_ == null) { + if (value == null) { + 
throw new NullPointerException(); + } + condition_ = value; + onChanged(); + } else { + conditionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder setCondition( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { + if (conditionBuilder_ == null) { + condition_ = builderForValue.build(); + onChanged(); + } else { + conditionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { + if (conditionBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { + condition_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); + } else { + condition_ = value; + } + onChanged(); + } else { + conditionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder clearCondition() { + if (conditionBuilder_ == null) { + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + onChanged(); + } else { + conditionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getConditionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { + if (conditionBuilder_ != null) { + return conditionBuilder_.getMessageOrBuilder(); + } else { + return condition_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> + getConditionFieldBuilder() { + if (conditionBuilder_ == null) { + conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>( + condition_, + getParentForChildren(), + isClean()); + condition_ = null; + } + return conditionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MutateRequest) + } + + static { + defaultInstance = new MutateRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateRequest) + } + + public interface MutateResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result result = 1; + boolean hasResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional bool processed = 2; + boolean hasProcessed(); + boolean getProcessed(); + } + public static final class MutateResponse extends + com.google.protobuf.GeneratedMessage + implements MutateResponseOrBuilder { + // Use MutateResponse.newBuilder() to construct. 
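/* Editor's sketch, not compiler output: a MutateResponse carries an optional Result and an
 * optional `processed` flag. A minimal deserialization sketch, assuming `bytes` holds a
 * serialized response (reading processed == false as "not applied" assumes the flag reports
 * whether a conditional mutate took effect):
 *
 *   MutateResponse resp = MutateResponse.parseFrom(bytes);
 *   if (resp.hasProcessed() && !resp.getProcessed()) {
 *     // the server reports the mutate was not applied
 *   }
 */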
+ private MutateResponse(Builder builder) { + super(builder); + } + private MutateResponse(boolean noInit) {} + + private static final MutateResponse defaultInstance; + public static MutateResponse getDefaultInstance() { + return defaultInstance; + } + + public MutateResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + return result_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional bool processed = 2; + public static final int PROCESSED_FIELD_NUMBER = 2; + private boolean processed_; + public boolean hasProcessed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getProcessed() { + return processed_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + processed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, processed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, processed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && 
getResult() + .equals(other.getResult()); + } + result = result && (hasProcessed() == other.hasProcessed()); + if (hasProcessed()) { + result = result && (getProcessed() + == other.getProcessed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasProcessed()) { + hash = (37 * hash) + PROCESSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getProcessed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + processed_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this); + int from_bitField0_ = 
bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.processed_ = processed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasProcessed()) { + setProcessed(other.getProcessed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); + if (hasResult()) { + subBuilder.mergeFrom(getResult()); + } + input.readMessage(subBuilder, extensionRegistry); + setResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + processed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .Result result = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } else { + return resultBuilder_.getMessage(); + } + } + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setResult( + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional bool processed = 2; + private boolean processed_ ; + public boolean hasProcessed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getProcessed() { + return processed_; + } + public Builder setProcessed(boolean value) { + bitField0_ |= 0x00000002; + processed_ = value; + onChanged(); + return this; + } + public Builder clearProcessed() { + bitField0_ = (bitField0_ & ~0x00000002); + processed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MutateResponse) + } + + static { + defaultInstance = new MutateResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateResponse) + } + + public interface ScanOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Column column = 1; + java.util.List + getColumnList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); + int getColumnCount(); + java.util.List + getColumnOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index); + + // repeated .NameBytesPair attribute = 2; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + 
int getAttributeCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
+        getAttributeOrBuilderList();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
+        int index);
+
+    // optional bytes startRow = 3;
+    boolean hasStartRow();
+    com.google.protobuf.ByteString getStartRow();
+
+    // optional bytes stopRow = 4;
+    boolean hasStopRow();
+    com.google.protobuf.ByteString getStopRow();
+
+    // optional .Filter filter = 5;
+    boolean hasFilter();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder();
+
+    // optional .TimeRange timeRange = 6;
+    boolean hasTimeRange();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
+
+    // optional uint32 maxVersions = 7 [default = 1];
+    boolean hasMaxVersions();
+    int getMaxVersions();
+
+    // optional bool cacheBlocks = 8 [default = true];
+    boolean hasCacheBlocks();
+    boolean getCacheBlocks();
+
+    // optional uint32 batchSize = 9;
+    boolean hasBatchSize();
+    int getBatchSize();
+
+    // optional uint64 maxResultSize = 10;
+    boolean hasMaxResultSize();
+    long getMaxResultSize();
+
+    // optional uint32 storeLimit = 11;
+    boolean hasStoreLimit();
+    int getStoreLimit();
+
+    // optional uint32 storeOffset = 12;
+    boolean hasStoreOffset();
+    int getStoreOffset();
+  }
+  public static final class Scan extends
+      com.google.protobuf.GeneratedMessage
+      implements ScanOrBuilder {
+    // Use Scan.newBuilder() to construct.
+    private Scan(Builder builder) {
+      super(builder);
+    }
+    private Scan(boolean noInit) {}
+
+    private static final Scan defaultInstance;
+    public static Scan getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public Scan getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // repeated .Column column = 1;
+    public static final int COLUMN_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
+      return column_;
+    }
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
+        getColumnOrBuilderList() {
+      return column_;
+    }
+    public int getColumnCount() {
+      return column_.size();
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
+      return column_.get(index);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
+        int index) {
+      return column_.get(index);
+    }
+
+    // repeated .NameBytesPair attribute = 2;
+    public static final int ATTRIBUTE_FIELD_NUMBER = 2;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
+      return attribute_;
+    }
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
+        getAttributeOrBuilderList() {
+      return attribute_;
+    }
+    public int getAttributeCount() {
+      return attribute_.size();
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
+      return attribute_.get(index);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
+        int
index) { + return attribute_.get(index); + } + + // optional bytes startRow = 3; + public static final int STARTROW_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString startRow_; + public boolean hasStartRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getStartRow() { + return startRow_; + } + + // optional bytes stopRow = 4; + public static final int STOPROW_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString stopRow_; + public boolean hasStopRow() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getStopRow() { + return stopRow_; + } + + // optional .Filter filter = 5; + public static final int FILTER_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + return filter_; + } + + // optional .TimeRange timeRange = 6; + public static final int TIMERANGE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + public static final int MAXVERSIONS_FIELD_NUMBER = 7; + private int maxVersions_; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getMaxVersions() { + return maxVersions_; + } + + // optional bool cacheBlocks = 8 [default = true]; + public static final int CACHEBLOCKS_FIELD_NUMBER = 8; + private boolean cacheBlocks_; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + + // optional uint32 batchSize = 9; + public static final int BATCHSIZE_FIELD_NUMBER = 9; + private int batchSize_; + public boolean hasBatchSize() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getBatchSize() { + return batchSize_; + } + + // optional uint64 maxResultSize = 10; + public static final int MAXRESULTSIZE_FIELD_NUMBER = 10; + private long maxResultSize_; + public boolean hasMaxResultSize() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public long getMaxResultSize() { + return maxResultSize_; + } + + // optional uint32 storeLimit = 11; + public static final int STORELIMIT_FIELD_NUMBER = 11; + private int storeLimit_; + public boolean hasStoreLimit() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public int getStoreLimit() { + return storeLimit_; + } + + // optional uint32 storeOffset = 12; + public static final int STOREOFFSET_FIELD_NUMBER = 12; + private int storeOffset_; + public boolean hasStoreOffset() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + public int getStoreOffset() { + return storeOffset_; + } + + private void initFields() { + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + startRow_ = com.google.protobuf.ByteString.EMPTY; + stopRow_ = 
com.google.protobuf.ByteString.EMPTY; + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + maxVersions_ = 1; + cacheBlocks_ = true; + batchSize_ = 0; + maxResultSize_ = 0L; + storeLimit_ = 0; + storeOffset_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < column_.size(); i++) { + output.writeMessage(1, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(2, attribute_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(3, startRow_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(4, stopRow_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt32(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt32(9, batchSize_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeUInt64(10, maxResultSize_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + output.writeUInt32(11, storeLimit_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + output.writeUInt32(12, storeOffset_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < column_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, attribute_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, startRow_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, stopRow_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, cacheBlocks_); + } 
+ if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(9, batchSize_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(10, maxResultSize_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(11, storeLimit_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(12, storeOffset_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj; + + boolean result = true; + result = result && getColumnList() + .equals(other.getColumnList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasStartRow() == other.hasStartRow()); + if (hasStartRow()) { + result = result && getStartRow() + .equals(other.getStartRow()); + } + result = result && (hasStopRow() == other.hasStopRow()); + if (hasStopRow()) { + result = result && getStopRow() + .equals(other.getStopRow()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && (hasMaxVersions() == other.hasMaxVersions()); + if (hasMaxVersions()) { + result = result && (getMaxVersions() + == other.getMaxVersions()); + } + result = result && (hasCacheBlocks() == other.hasCacheBlocks()); + if (hasCacheBlocks()) { + result = result && (getCacheBlocks() + == other.getCacheBlocks()); + } + result = result && (hasBatchSize() == other.hasBatchSize()); + if (hasBatchSize()) { + result = result && (getBatchSize() + == other.getBatchSize()); + } + result = result && (hasMaxResultSize() == other.hasMaxResultSize()); + if (hasMaxResultSize()) { + result = result && (getMaxResultSize() + == other.getMaxResultSize()); + } + result = result && (hasStoreLimit() == other.hasStoreLimit()); + if (hasStoreLimit()) { + result = result && (getStoreLimit() + == other.getStoreLimit()); + } + result = result && (hasStoreOffset() == other.hasStoreOffset()); + if (hasStoreOffset()) { + result = result && (getStoreOffset() + == other.getStoreOffset()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getColumnCount() > 0) { + hash = (37 * hash) + COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumnList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasStartRow()) { + hash = (37 * hash) + 
STARTROW_FIELD_NUMBER; + hash = (53 * hash) + getStartRow().hashCode(); + } + if (hasStopRow()) { + hash = (37 * hash) + STOPROW_FIELD_NUMBER; + hash = (53 * hash) + getStopRow().hashCode(); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + if (hasMaxVersions()) { + hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getMaxVersions(); + } + if (hasCacheBlocks()) { + hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCacheBlocks()); + } + if (hasBatchSize()) { + hash = (37 * hash) + BATCHSIZE_FIELD_NUMBER; + hash = (53 * hash) + getBatchSize(); + } + if (hasMaxResultSize()) { + hash = (37 * hash) + MAXRESULTSIZE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getMaxResultSize()); + } + if (hasStoreLimit()) { + hash = (37 * hash) + STORELIMIT_FIELD_NUMBER; + hash = (53 * hash) + getStoreLimit(); + } + if (hasStoreOffset()) { + hash = (37 * hash) + STOREOFFSET_FIELD_NUMBER; + hash = (53 * hash) + getStoreOffset(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFieldBuilder(); + getAttributeFieldBuilder(); + getFilterFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + columnBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + attributeBuilder_.clear(); + } + startRow_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + stopRow_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + maxVersions_ = 1; + bitField0_ = (bitField0_ & ~0x00000040); + cacheBlocks_ = true; + bitField0_ = (bitField0_ & ~0x00000080); + batchSize_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + maxResultSize_ = 0L; + bitField0_ = (bitField0_ & ~0x00000200); + storeLimit_ = 0; + bitField0_ = (bitField0_ & ~0x00000400); + storeOffset_ = 0; + bitField0_ = 
(bitField0_ & ~0x00000800); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (columnBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + column_ = java.util.Collections.unmodifiableList(column_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.column_ = column_; + } else { + result.column_ = columnBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000001; + } + result.startRow_ = startRow_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000002; + } + result.stopRow_ = stopRow_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.maxVersions_ = maxVersions_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.cacheBlocks_ = cacheBlocks_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000040; + } + result.batchSize_ = batchSize_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000080; + } + result.maxResultSize_ = maxResultSize_; + if (((from_bitField0_ & 0x00000400) == 0x00000400)) { + to_bitField0_ |= 0x00000100; + } + result.storeLimit_ = storeLimit_; + if (((from_bitField0_ & 0x00000800) == 0x00000800)) { + to_bitField0_ |= 0x00000200; + } + result.storeOffset_ = storeOffset_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + 
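// NOTE (editorial, not protoc output): a minimal sketch of how client code
+      // might drive this builder, assuming a hypothetical column family "cf";
+      // Column.setFamily is assumed from the Client.proto definition, while
+      // addColumn, setStartRow, setStopRow, setMaxVersions and setCacheBlocks
+      // are the setters defined in this Builder:
+      //
+      //   ClientProtos.Scan scan = ClientProtos.Scan.newBuilder()
+      //       .addColumn(ClientProtos.Column.newBuilder()
+      //           .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf")))
+      //       .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-0"))
+      //       .setStopRow(com.google.protobuf.ByteString.copyFromUtf8("row-9"))
+      //       .setMaxVersions(3)
+      //       .setCacheBlocks(false)
+      //       .build();
+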
public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this; + if (columnBuilder_ == null) { + if (!other.column_.isEmpty()) { + if (column_.isEmpty()) { + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureColumnIsMutable(); + column_.addAll(other.column_); + } + onChanged(); + } + } else { + if (!other.column_.isEmpty()) { + if (columnBuilder_.isEmpty()) { + columnBuilder_.dispose(); + columnBuilder_ = null; + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000001); + columnBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnFieldBuilder() : null; + } else { + columnBuilder_.addAllMessages(other.column_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000002); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasStartRow()) { + setStartRow(other.getStartRow()); + } + if (other.hasStopRow()) { + setStopRow(other.getStopRow()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + if (other.hasMaxVersions()) { + setMaxVersions(other.getMaxVersions()); + } + if (other.hasCacheBlocks()) { + setCacheBlocks(other.getCacheBlocks()); + } + if (other.hasBatchSize()) { + setBatchSize(other.getBatchSize()); + } + if (other.hasMaxResultSize()) { + setMaxResultSize(other.getMaxResultSize()); + } + if (other.hasStoreLimit()) { + setStoreLimit(other.getStoreLimit()); + } + if (other.hasStoreOffset()) { + setStoreOffset(other.getStoreOffset()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, 
unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumn(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 26: { + bitField0_ |= 0x00000004; + startRow_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + stopRow_ = input.readBytes(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + cacheBlocks_ = input.readBool(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + batchSize_ = input.readUInt32(); + break; + } + case 80: { + bitField0_ |= 0x00000200; + maxResultSize_ = input.readUInt64(); + break; + } + case 88: { + bitField0_ |= 0x00000400; + storeLimit_ = input.readUInt32(); + break; + } + case 96: { + bitField0_ |= 0x00000800; + storeOffset_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Column column = 1; + private java.util.List column_ = + java.util.Collections.emptyList(); + private void ensureColumnIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + column_ = new java.util.ArrayList(column_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; + + public java.util.List getColumnList() { + if (columnBuilder_ == null) { + return java.util.Collections.unmodifiableList(column_); + } else { + return columnBuilder_.getMessageList(); + } + } + public int getColumnCount() { + if (columnBuilder_ == null) { + return column_.size(); + } else { + return columnBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + if (columnBuilder_ == null) { + return column_.get(index); + } else { + return columnBuilder_.getMessage(index); + } + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.set(index, value); 
+ onChanged(); + } else { + columnBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.set(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(value); + onChanged(); + } else { + columnBuilder_.addMessage(value); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(index, value); + onChanged(); + } else { + columnBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumn( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumn( + java.lang.Iterable values) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + super.addAll(values, column_); + onChanged(); + } else { + columnBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumn() { + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + columnBuilder_.clear(); + } + return this; + } + public Builder removeColumn(int index) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.remove(index); + onChanged(); + } else { + columnBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( + int index) { + return getColumnFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + if (columnBuilder_ == null) { + return column_.get(index); } else { + return columnBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnOrBuilderList() { + if (columnBuilder_ != null) { + return columnBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(column_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { + return getColumnFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( + int index) { + return getColumnFieldBuilder().addBuilder( + index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public java.util.List + getColumnBuilderList() { + return getColumnFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> + getColumnFieldBuilder() { + if (columnBuilder_ == null) { + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( + column_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + column_ = null; + } + return columnBuilder_; + } + + // repeated .NameBytesPair attribute = 2; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + 
attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + attribute_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + 
isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional bytes startRow = 3; + private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStartRow() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartRow() { + return startRow_; + } + public Builder setStartRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + startRow_ = value; + onChanged(); + return this; + } + public Builder clearStartRow() { + bitField0_ = (bitField0_ & ~0x00000004); + startRow_ = getDefaultInstance().getStartRow(); + onChanged(); + return this; + } + + // optional bytes stopRow = 4; + private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStopRow() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getStopRow() { + return stopRow_; + } + public Builder setStopRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + stopRow_ = value; + onChanged(); + return this; + } + public Builder clearStopRow() { + bitField0_ = (bitField0_ & ~0x00000008); + stopRow_ = getDefaultInstance().getStopRow(); + onChanged(); + return this; + } + + // optional .Filter filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + onChanged(); + } else { + 
filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // optional .TimeRange timeRange = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + 
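// NOTE (editorial, not protoc output): timeRange follows protoc's standard
+      // optional-message-field pattern used throughout this file: a presence
+      // bit in bitField0_ (0x00000020 here), a plain timeRange_ field, and a
+      // SingleFieldBuilder created lazily once a nested builder is requested.
+      // mergeTimeRange() above merges into an existing value when the presence
+      // bit is already set, rather than replacing it outright.
+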
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + private int maxVersions_ = 1; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getMaxVersions() { + return maxVersions_; + } + public Builder setMaxVersions(int value) { + bitField0_ |= 0x00000040; + maxVersions_ = value; + onChanged(); + return this; + } + public Builder clearMaxVersions() { + bitField0_ = (bitField0_ & ~0x00000040); + maxVersions_ = 1; + onChanged(); + return this; + } + + // optional bool cacheBlocks = 8 [default = true]; + private boolean cacheBlocks_ = true; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + public Builder setCacheBlocks(boolean value) { + bitField0_ |= 0x00000080; + cacheBlocks_ = value; + onChanged(); + return this; + } + public Builder clearCacheBlocks() { + bitField0_ = (bitField0_ & ~0x00000080); + cacheBlocks_ = true; + onChanged(); + return this; + } + + // optional uint32 batchSize = 9; + private int batchSize_ ; + public boolean hasBatchSize() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public int getBatchSize() { + return batchSize_; + } + public Builder setBatchSize(int value) { + bitField0_ |= 0x00000100; + batchSize_ = value; + onChanged(); + return this; + } + public Builder clearBatchSize() { + bitField0_ = (bitField0_ & ~0x00000100); + batchSize_ = 0; + onChanged(); + return this; + } + + // optional uint64 maxResultSize = 10; + private long maxResultSize_ ; + public boolean hasMaxResultSize() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + public long getMaxResultSize() { + return maxResultSize_; + } + public Builder setMaxResultSize(long value) { + bitField0_ |= 0x00000200; + maxResultSize_ = value; + onChanged(); + return this; + } + public Builder clearMaxResultSize() { + bitField0_ = (bitField0_ & ~0x00000200); + maxResultSize_ = 0L; + onChanged(); + return this; + } + + // optional uint32 storeLimit = 11; + private int storeLimit_ ; + public boolean hasStoreLimit() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + public int getStoreLimit() { + return storeLimit_; + } + public Builder setStoreLimit(int value) { + bitField0_ |= 0x00000400; + storeLimit_ = value; + onChanged(); + return this; + } + public Builder clearStoreLimit() { + 
bitField0_ = (bitField0_ & ~0x00000400); + storeLimit_ = 0; + onChanged(); + return this; + } + + // optional uint32 storeOffset = 12; + private int storeOffset_ ; + public boolean hasStoreOffset() { + return ((bitField0_ & 0x00000800) == 0x00000800); + } + public int getStoreOffset() { + return storeOffset_; + } + public Builder setStoreOffset(int value) { + bitField0_ |= 0x00000800; + storeOffset_ = value; + onChanged(); + return this; + } + public Builder clearStoreOffset() { + bitField0_ = (bitField0_ & ~0x00000800); + storeOffset_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Scan) + } + + static { + defaultInstance = new Scan(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Scan) + } + + public interface ScanRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional .Scan scan = 2; + boolean hasScan(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); + + // optional uint64 scannerId = 3; + boolean hasScannerId(); + long getScannerId(); + + // optional uint32 numberOfRows = 4; + boolean hasNumberOfRows(); + int getNumberOfRows(); + + // optional bool closeScanner = 5; + boolean hasCloseScanner(); + boolean getCloseScanner(); + + // optional uint64 nextCallSeq = 6; + boolean hasNextCallSeq(); + long getNextCallSeq(); + } + public static final class ScanRequest extends + com.google.protobuf.GeneratedMessage + implements ScanRequestOrBuilder { + // Use ScanRequest.newBuilder() to construct. 
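+ // A minimal construction sketch (values illustrative only; all fields are
+ // optional, so any subset may be set):
+ //   ScanRequest request = ScanRequest.newBuilder()
+ //       .setScan(Scan.getDefaultInstance())
+ //       .setNumberOfRows(100)
+ //       .setCloseScanner(false)
+ //       .build();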
+ private ScanRequest(Builder builder) { + super(builder); + } + private ScanRequest(boolean noInit) {} + + private static final ScanRequest defaultInstance; + public static ScanRequest getDefaultInstance() { + return defaultInstance; + } + + public ScanRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; + } + + private int bitField0_; + // optional .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional .Scan scan = 2; + public static final int SCAN_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // optional uint64 scannerId = 3; + public static final int SCANNERID_FIELD_NUMBER = 3; + private long scannerId_; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getScannerId() { + return scannerId_; + } + + // optional uint32 numberOfRows = 4; + public static final int NUMBEROFROWS_FIELD_NUMBER = 4; + private int numberOfRows_; + public boolean hasNumberOfRows() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getNumberOfRows() { + return numberOfRows_; + } + + // optional bool closeScanner = 5; + public static final int CLOSESCANNER_FIELD_NUMBER = 5; + private boolean closeScanner_; + public boolean hasCloseScanner() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getCloseScanner() { + return closeScanner_; + } + + // optional uint64 nextCallSeq = 6; + public static final int NEXTCALLSEQ_FIELD_NUMBER = 6; + private long nextCallSeq_; + public boolean hasNextCallSeq() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public long getNextCallSeq() { + return nextCallSeq_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + scannerId_ = 0L; + numberOfRows_ = 0; + closeScanner_ = false; + nextCallSeq_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasRegion()) { + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasScan()) { + if (!getScan().isInitialized()) { + 
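+ // The nested Scan failed its own isInitialized() check; cache the
+ // negative result so repeated calls stay cheap.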
memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, scannerId_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt32(4, numberOfRows_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, closeScanner_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeUInt64(6, nextCallSeq_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, scannerId_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, numberOfRows_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, closeScanner_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(6, nextCallSeq_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + } + result = result && (hasNumberOfRows() == other.hasNumberOfRows()); + if (hasNumberOfRows()) { + result = result && (getNumberOfRows() + == other.getNumberOfRows()); + } + result = result && (hasCloseScanner() == other.hasCloseScanner()); + if (hasCloseScanner()) { + result = result && (getCloseScanner() + == other.getCloseScanner()); + } + result = result && (hasNextCallSeq() == other.hasNextCallSeq()); + if (hasNextCallSeq()) { + result = result && (getNextCallSeq() + == other.getNextCallSeq()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } + if (hasNumberOfRows()) { + hash = (37 * hash) + NUMBEROFROWS_FIELD_NUMBER; + hash = (53 * hash) + getNumberOfRows(); + } + if (hasCloseScanner()) { + hash = (37 * hash) + CLOSESCANNER_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCloseScanner()); + } + if (hasNextCallSeq()) { + hash = (37 * hash) + NEXTCALLSEQ_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getNextCallSeq()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + scannerId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + numberOfRows_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + closeScanner_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + nextCallSeq_ = 0L; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.scannerId_ = scannerId_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.numberOfRows_ = numberOfRows_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.closeScanner_ = closeScanner_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.nextCallSeq_ = nextCallSeq_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } + if (other.hasNumberOfRows()) { + setNumberOfRows(other.getNumberOfRows()); + } + if (other.hasCloseScanner()) { + setCloseScanner(other.getCloseScanner()); + } + if (other.hasNextCallSeq()) { + setNextCallSeq(other.getNextCallSeq()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasRegion()) { + if (!getRegion().isInitialized()) { + + return false; + } + } + if (hasScan()) { + if (!getScan().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + 
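+ // parseUnknownField() returns false on an end-group tag, which ends this
+ // message: keep the fields read so far and stop parsing.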
this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); + if (hasScan()) { + subBuilder.mergeFrom(getScan()); + } + input.readMessage(subBuilder, extensionRegistry); + setScan(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + scannerId_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + numberOfRows_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + closeScanner_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + nextCallSeq_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // optional .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional .Scan scan = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // optional uint64 scannerId = 3; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000004; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + bitField0_ = (bitField0_ & ~0x00000004); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional uint32 numberOfRows = 4; + private int numberOfRows_ ; + public boolean hasNumberOfRows() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getNumberOfRows() { + return numberOfRows_; + } + public Builder setNumberOfRows(int value) { + bitField0_ |= 0x00000008; + numberOfRows_ = value; + onChanged(); + return this; + } + public Builder clearNumberOfRows() { + bitField0_ = (bitField0_ & ~0x00000008); + numberOfRows_ = 0; + onChanged(); + return this; + } + + // optional bool closeScanner = 5; + private boolean closeScanner_ ; + public boolean hasCloseScanner() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getCloseScanner() { + return closeScanner_; + } + public Builder setCloseScanner(boolean value) { + bitField0_ |= 0x00000010; + closeScanner_ = value; + onChanged(); + return this; + } + public Builder clearCloseScanner() { + bitField0_ = (bitField0_ & ~0x00000010); + closeScanner_ = false; + onChanged(); + return this; + } + + // optional uint64 nextCallSeq = 6; + private long nextCallSeq_ ; + public boolean hasNextCallSeq() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public long getNextCallSeq() { + return nextCallSeq_; + } + public Builder setNextCallSeq(long value) { + bitField0_ |= 0x00000020; + nextCallSeq_ = value; + onChanged(); + return this; + } + public Builder clearNextCallSeq() { + bitField0_ = (bitField0_ & ~0x00000020); + nextCallSeq_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ScanRequest) + } + + static { + defaultInstance = new ScanRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ScanRequest) + } + + public interface ScanResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Result result = 1; + java.util.List + getResultList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index); + int getResultCount(); + java.util.List + getResultOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( + int index); + + // optional uint64 scannerId = 2; + boolean hasScannerId(); + 
long getScannerId(); + + // optional bool moreResults = 3; + boolean hasMoreResults(); + boolean getMoreResults(); + + // optional uint32 ttl = 4; + boolean hasTtl(); + int getTtl(); + } + public static final class ScanResponse extends + com.google.protobuf.GeneratedMessage + implements ScanResponseOrBuilder { + // Use ScanResponse.newBuilder() to construct. + private ScanResponse(Builder builder) { + super(builder); + } + private ScanResponse(boolean noInit) {} + + private static final ScanResponse defaultInstance; + public static ScanResponse getDefaultInstance() { + return defaultInstance; + } + + public ScanResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; + } + + private int bitField0_; + // repeated .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + public java.util.List getResultList() { + return result_; + } + public java.util.List + getResultOrBuilderList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { + return result_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + // optional uint64 scannerId = 2; + public static final int SCANNERID_FIELD_NUMBER = 2; + private long scannerId_; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getScannerId() { + return scannerId_; + } + + // optional bool moreResults = 3; + public static final int MORERESULTS_FIELD_NUMBER = 3; + private boolean moreResults_; + public boolean hasMoreResults() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMoreResults() { + return moreResults_; + } + + // optional uint32 ttl = 4; + public static final int TTL_FIELD_NUMBER = 4; + private int ttl_; + public boolean hasTtl() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getTtl() { + return ttl_; + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + scannerId_ = 0L; + moreResults_ = false; + ttl_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(2, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, moreResults_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt32(4, ttl_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + 
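+ // First call: compute the size once and memoize it; writeTo() above
+ // depends on this cached value.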
+ size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, moreResults_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, ttl_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + } + result = result && (hasMoreResults() == other.hasMoreResults()); + if (hasMoreResults()) { + result = result && (getMoreResults() + == other.getMoreResults()); + } + result = result && (hasTtl() == other.hasTtl()); + if (hasTtl()) { + result = result && (getTtl() + == other.getTtl()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } + if (hasMoreResults()) { + hash = (37 * hash) + MORERESULTS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMoreResults()); + } + if (hasTtl()) { + hash = (37 * hash) + TTL_FIELD_NUMBER; + hash = (53 * hash) + getTtl(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder 
create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + scannerId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + moreResults_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + ttl_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.scannerId_ = scannerId_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.moreResults_ = moreResults_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + result.ttl_ = ttl_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + 
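+ // This builder holds no results yet, so adopt other's list wholesale and
+ // re-wrap it in a RepeatedFieldBuilder only when next needed.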
resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } + if (other.hasMoreResults()) { + setMoreResults(other.getMoreResults()); + } + if (other.hasTtl()) { + setTtl(other.getTtl()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + scannerId_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + moreResults_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + ttl_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Result result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + 
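+ // ensureResultIsMutable() copied the list if it was still shared with a
+ // previously built message, so this in-place set is safe.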
result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); + } + public java.util.List + getResultBuilderList() { + return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional uint64 scannerId = 2; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000002; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + bitField0_ = (bitField0_ & ~0x00000002); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional bool moreResults = 3; + private boolean moreResults_ ; + public boolean hasMoreResults() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getMoreResults() { + return moreResults_; + } + public Builder setMoreResults(boolean value) { + bitField0_ |= 0x00000004; + moreResults_ = value; + onChanged(); + return this; + } + public Builder clearMoreResults() { + bitField0_ = (bitField0_ & ~0x00000004); + moreResults_ = false; + onChanged(); + return this; + } + + // optional uint32 ttl = 4; + private int ttl_ ; + public boolean hasTtl() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getTtl() { + return ttl_; + } + public Builder setTtl(int value) { + bitField0_ |= 0x00000008; + ttl_ = value; + onChanged(); + return this; + } + public Builder clearTtl() { + bitField0_ = (bitField0_ & ~0x00000008); + ttl_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ScanResponse) + } + + static { + defaultInstance = new ScanResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ScanResponse) + } + + public interface LockRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated bytes row = 2; + java.util.List getRowList(); + int getRowCount(); + com.google.protobuf.ByteString getRow(int index); + } + public static final class LockRowRequest extends + com.google.protobuf.GeneratedMessage + implements LockRowRequestOrBuilder { + // Use LockRowRequest.newBuilder() to construct. 
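+ // By the usual generated-builder pattern (accessors appear further below),
+ // construction looks roughly like:
+ //   LockRowRequest.newBuilder().setRegion(region).addRow(row).build();
+ // setRegion/addRow are assumed from the standard codegen for a required
+ // message field and a repeated bytes field.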
+ private LockRowRequest(Builder builder) { + super(builder); + } + private LockRowRequest(boolean noInit) {} + + private static final LockRowRequest defaultInstance; + public static LockRowRequest getDefaultInstance() { + return defaultInstance; + } + + public LockRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated bytes row = 2; + public static final int ROW_FIELD_NUMBER = 2; + private java.util.List row_; + public java.util.List + getRowList() { + return row_; + } + public int getRowCount() { + return row_.size(); + } + public com.google.protobuf.ByteString getRow(int index) { + return row_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + row_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < row_.size(); i++) { + output.writeBytes(2, row_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + { + int dataSize = 0; + for (int i = 0; i < row_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(row_.get(i)); + } + size += dataSize; + size += 1 * getRowList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getRowList() + .equals(other.getRowList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getRowCount() > 0) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRowList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + row_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( +
result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + row_ = java.util.Collections.unmodifiableList(row_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.row_ = row_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (!other.row_.isEmpty()) { + if (row_.isEmpty()) { + row_ = other.row_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureRowIsMutable(); + row_.addAll(other.row_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + ensureRowIsMutable(); + row_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated bytes row = 2; + private java.util.List<com.google.protobuf.ByteString> row_ = java.util.Collections.emptyList();; + private void ensureRowIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + row_ = new java.util.ArrayList<com.google.protobuf.ByteString>(row_); + bitField0_ |= 0x00000002; + } + } + public java.util.List<com.google.protobuf.ByteString> + getRowList() { + return java.util.Collections.unmodifiableList(row_); + } + public int getRowCount() { + return row_.size(); + } + public com.google.protobuf.ByteString getRow(int index) { + return row_.get(index); + } + public Builder setRow( +
int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowIsMutable(); + row_.set(index, value); + onChanged(); + return this; + } + public Builder addRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowIsMutable(); + row_.add(value); + onChanged(); + return this; + } + public Builder addAllRow( + java.lang.Iterable<? extends com.google.protobuf.ByteString> values) { + ensureRowIsMutable(); + super.addAll(values, row_); + onChanged(); + return this; + } + public Builder clearRow() { + row_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LockRowRequest) + } + + static { + defaultInstance = new LockRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LockRowRequest) + } + + public interface LockRowResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lockId = 1; + boolean hasLockId(); + long getLockId(); + + // optional uint32 ttl = 2; + boolean hasTtl(); + int getTtl(); + } + public static final class LockRowResponse extends + com.google.protobuf.GeneratedMessage + implements LockRowResponseOrBuilder { + // Use LockRowResponse.newBuilder() to construct. + private LockRowResponse(Builder builder) { + super(builder); + } + private LockRowResponse(boolean noInit) {} + + private static final LockRowResponse defaultInstance; + public static LockRowResponse getDefaultInstance() { + return defaultInstance; + } + + public LockRowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lockId = 1; + public static final int LOCKID_FIELD_NUMBER = 1; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLockId() { + return lockId_; + } + + // optional uint32 ttl = 2; + public static final int TTL_FIELD_NUMBER = 2; + private int ttl_; + public boolean hasTtl() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTtl() { + return ttl_; + } + + private void initFields() { + lockId_ = 0L; + ttl_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLockId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lockId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, ttl_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size
+= com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lockId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, ttl_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) obj; + + boolean result = true; + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasTtl() == other.hasTtl()); + if (hasTtl()) { + result = result && (getTtl() + == other.getTtl()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasTtl()) { + hash = (37 * hash) + TTL_FIELD_NUMBER; + hash = (53 * hash) + getTtl(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + 
Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + ttl_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result =
buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ttl_ = ttl_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()) return this; + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasTtl()) { + setTtl(other.getTtl()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLockId()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lockId_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ttl_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lockId = 1; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000001; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000001); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional uint32 ttl = 2; + private int ttl_ ; + public boolean hasTtl() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTtl() { + return ttl_; + } + public Builder setTtl(int value) { + bitField0_ |= 0x00000002; + 
ttl_ = value; + onChanged(); + return this; + } + public Builder clearTtl() { + bitField0_ = (bitField0_ & ~0x00000002); + ttl_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LockRowResponse) + } + + static { + defaultInstance = new LockRowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LockRowResponse) + } + + public interface UnlockRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required uint64 lockId = 2; + boolean hasLockId(); + long getLockId(); + } + public static final class UnlockRowRequest extends + com.google.protobuf.GeneratedMessage + implements UnlockRowRequestOrBuilder { + // Use UnlockRowRequest.newBuilder() to construct. + private UnlockRowRequest(Builder builder) { + super(builder); + } + private UnlockRowRequest(boolean noInit) {} + + private static final UnlockRowRequest defaultInstance; + public static UnlockRowRequest getDefaultInstance() { + return defaultInstance; + } + + public UnlockRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required uint64 lockId = 2; + public static final int LOCKID_FIELD_NUMBER = 2; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + lockId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLockId()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, lockId_); + } + getUnknownFields().writeTo(output); + } 
+ + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, lockId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.lockId_ = lockId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasLockId()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + lockId_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required uint64 lockId = 2; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000002; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000002); + lockId_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UnlockRowRequest) + } + + static { + defaultInstance = new UnlockRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnlockRowRequest) + } + + public interface UnlockRowResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class UnlockRowResponse extends + com.google.protobuf.GeneratedMessage + implements UnlockRowResponseOrBuilder { + // Use UnlockRowResponse.newBuilder() to construct. + private UnlockRowResponse(Builder builder) { + super(builder); + } + private UnlockRowResponse(boolean noInit) {} + + private static final UnlockRowResponse defaultInstance; + public static UnlockRowResponse getDefaultInstance() { + return defaultInstance; + } + + public UnlockRowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return 
result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder 
newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( +
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:UnlockRowResponse) + } + + static { + defaultInstance = new UnlockRowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnlockRowResponse) + } + + public interface BulkLoadHFileRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> + getFamilyPathList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); + int getFamilyPathCount(); + java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> + getFamilyPathOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index); + + // optional bool assignSeqNum = 3; + boolean hasAssignSeqNum(); + boolean getAssignSeqNum(); + } + public static final class BulkLoadHFileRequest extends + com.google.protobuf.GeneratedMessage + implements BulkLoadHFileRequestOrBuilder { + // Use BulkLoadHFileRequest.newBuilder() to construct. + private BulkLoadHFileRequest(Builder builder) { + super(builder); + } + private BulkLoadHFileRequest(boolean noInit) {} + + private static final BulkLoadHFileRequest defaultInstance; + public static BulkLoadHFileRequest getDefaultInstance() { + return defaultInstance; + } + + public BulkLoadHFileRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + } + + public interface FamilyPathOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required string path = 2; + boolean hasPath(); + String getPath(); + } + public static final class FamilyPath extends + com.google.protobuf.GeneratedMessage + implements FamilyPathOrBuilder { + // Use FamilyPath.newBuilder() to construct.
+ private FamilyPath(Builder builder) { + super(builder); + } + private FamilyPath(boolean noInit) {} + + private static final FamilyPath defaultInstance; + public static FamilyPath getDefaultInstance() { + return defaultInstance; + } + + public FamilyPath getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required string path = 2; + public static final int PATH_FIELD_NUMBER = 2; + private java.lang.Object path_; + public boolean hasPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getPath() { + java.lang.Object ref = path_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + path_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + path_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasPath()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getPathBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getPathBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final 
java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasPath() == other.hasPath()); + if (hasPath()) { + result = result && getPath() + .equals(other.getPath()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasPath()) { + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPath().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + path_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + 
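/*
 * Illustrative usage sketch, not part of the generated patch: the
 * parseDelimitedFrom overloads above pair with the standard protobuf
 * writeDelimitedTo to stream a sequence of length-prefixed FamilyPath
 * messages; parseDelimitedFrom returns null at end of stream. The family
 * and path values below are placeholders.
 */
class FamilyPathDelimitedStream {
  public static void main(String[] args) throws Exception {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    for (int i = 0; i < 3; i++) {
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath
          .newBuilder()
          .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
          .setPath("/staging/cf/hfile-" + i)
          .build()
          .writeDelimitedTo(out);              // varint length prefix + payload
    }
    java.io.ByteArrayInputStream in =
        new java.io.ByteArrayInputStream(out.toByteArray());
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath fp;
    while ((fp = org.apache.hadoop.hbase.protobuf.generated.ClientProtos
        .BulkLoadHFileRequest.FamilyPath.parseDelimitedFrom(in)) != null) {
      System.out.println(fp.getPath());        // prints the three staged paths
    }
  }
}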
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.path_ = path_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasPath()) { + setPath(other.getPath()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasPath()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + path_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + 
onChanged(); + return this; + } + + // required string path = 2; + private java.lang.Object path_ = ""; + public boolean hasPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getPath() { + java.lang.Object ref = path_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + path_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setPath(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + path_ = value; + onChanged(); + return this; + } + public Builder clearPath() { + bitField0_ = (bitField0_ & ~0x00000002); + path_ = getDefaultInstance().getPath(); + onChanged(); + return this; + } + void setPath(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + path_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath) + } + + static { + defaultInstance = new FamilyPath(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath) + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + public static final int FAMILYPATH_FIELD_NUMBER = 2; + private java.util.List familyPath_; + public java.util.List getFamilyPathList() { + return familyPath_; + } + public java.util.List + getFamilyPathOrBuilderList() { + return familyPath_; + } + public int getFamilyPathCount() { + return familyPath_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + return familyPath_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + return familyPath_.get(index); + } + + // optional bool assignSeqNum = 3; + public static final int ASSIGNSEQNUM_FIELD_NUMBER = 3; + private boolean assignSeqNum_; + public boolean hasAssignSeqNum() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getAssignSeqNum() { + return assignSeqNum_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + familyPath_ = java.util.Collections.emptyList(); + assignSeqNum_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + 
getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < familyPath_.size(); i++) { + output.writeMessage(2, familyPath_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, assignSeqNum_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + for (int i = 0; i < familyPath_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, familyPath_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, assignSeqNum_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getFamilyPathList() + .equals(other.getFamilyPathList()); + result = result && (hasAssignSeqNum() == other.hasAssignSeqNum()); + if (hasAssignSeqNum()) { + result = result && (getAssignSeqNum() + == other.getAssignSeqNum()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getFamilyPathCount() > 0) { + hash = (37 * hash) + FAMILYPATH_FIELD_NUMBER; + hash = (53 * hash) + getFamilyPathList().hashCode(); + } + if (hasAssignSeqNum()) { + hash = (37 * hash) + ASSIGNSEQNUM_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getAssignSeqNum()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { 
+ return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private 
Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getFamilyPathFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + familyPathBuilder_.clear(); + } + assignSeqNum_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (familyPathBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = java.util.Collections.unmodifiableList(familyPath_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.familyPath_ = familyPath_; + } else { + result.familyPath_ = familyPathBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.assignSeqNum_ = assignSeqNum_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (familyPathBuilder_ == null) { + if (!other.familyPath_.isEmpty()) { + if (familyPath_.isEmpty()) { + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyPathIsMutable(); + familyPath_.addAll(other.familyPath_); + } + onChanged(); + } + } else { + if (!other.familyPath_.isEmpty()) { + if (familyPathBuilder_.isEmpty()) { + familyPathBuilder_.dispose(); + familyPathBuilder_ = null; + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000002); + familyPathBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyPathFieldBuilder() : null; + } else { + familyPathBuilder_.addAllMessages(other.familyPath_); + } + } + } + if (other.hasAssignSeqNum()) { + setAssignSeqNum(other.getAssignSeqNum()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyPath(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + assignSeqNum_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + private java.util.List familyPath_ = + java.util.Collections.emptyList(); + private void ensureFamilyPathIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = new java.util.ArrayList(familyPath_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; + + public java.util.List getFamilyPathList() { + if (familyPathBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyPath_); + } else { + return familyPathBuilder_.getMessageList(); + } + } + public int getFamilyPathCount() { + if (familyPathBuilder_ == null) { + return familyPath_.size(); + } else { + return familyPathBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); + } else { + return familyPathBuilder_.getMessage(index); + } + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.set(index, value); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.set(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(value); + onChanged(); + } else { + familyPathBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(index, value); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFamilyPath( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyPath( + java.lang.Iterable values) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + super.addAll(values, familyPath_); + onChanged(); + } else { + familyPathBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyPath() { + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + 
familyPathBuilder_.clear(); + } + return this; + } + public Builder removeFamilyPath(int index) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.remove(index); + onChanged(); + } else { + familyPathBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); } else { + return familyPathBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getFamilyPathOrBuilderList() { + if (familyPathBuilder_ != null) { + return familyPathBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyPath_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { + return getFamilyPathFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public java.util.List + getFamilyPathBuilderList() { + return getFamilyPathFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> + getFamilyPathFieldBuilder() { + if (familyPathBuilder_ == null) { + familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( + familyPath_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + familyPath_ = null; + } + return familyPathBuilder_; + } + + // optional bool assignSeqNum = 3; + private boolean assignSeqNum_ ; + public boolean hasAssignSeqNum() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getAssignSeqNum() { + return assignSeqNum_; + } + public Builder setAssignSeqNum(boolean value) { + bitField0_ |= 0x00000004; + assignSeqNum_ = value; + onChanged(); + return this; + } + public Builder clearAssignSeqNum() { + bitField0_ = (bitField0_ & ~0x00000004); + assignSeqNum_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest) + } + + static { + defaultInstance = new BulkLoadHFileRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest) + } + + public interface BulkLoadHFileResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool loaded = 1; + boolean hasLoaded(); + 
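/*
 * Illustrative usage sketch, not part of the generated patch: assembling a
 * BulkLoadHFileRequest with the builder completed above. The RegionSpecifier
 * setters (setType/setValue) and the REGION_NAME enum constant are assumed
 * from the companion HBaseProtos file in this patch series; the region name,
 * family, and HFile path values are placeholders.
 */
class BulkLoadHFileRequestAssembly {
  public static void main(String[] args) throws Exception {
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region =
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
            .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos
                .RegionSpecifier.RegionSpecifierType.REGION_NAME)     // assumed enum
            .setValue(com.google.protobuf.ByteString.copyFromUtf8("t1,,1.abcdef."))
            .build();
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest req =
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder()
            .setRegion(region)                                        // required field 1
            .addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos
                .BulkLoadHFileRequest.FamilyPath.newBuilder()
                .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
                .setPath("/staging/cf/hfile-0001"))                   // repeated field 2
            .setAssignSeqNum(true)                                    // optional field 3
            .build();            // throws if region or any FamilyPath is incomplete
    System.out.println(
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest
            .parseFrom(req.toByteArray()).getFamilyPathCount());      // prints 1
  }
}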
boolean getLoaded(); + } + public static final class BulkLoadHFileResponse extends + com.google.protobuf.GeneratedMessage + implements BulkLoadHFileResponseOrBuilder { + // Use BulkLoadHFileResponse.newBuilder() to construct. + private BulkLoadHFileResponse(Builder builder) { + super(builder); + } + private BulkLoadHFileResponse(boolean noInit) {} + + private static final BulkLoadHFileResponse defaultInstance; + public static BulkLoadHFileResponse getDefaultInstance() { + return defaultInstance; + } + + public BulkLoadHFileResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool loaded = 1; + public static final int LOADED_FIELD_NUMBER = 1; + private boolean loaded_; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + + private void initFields() { + loaded_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLoaded()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, loaded_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, loaded_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; + + boolean result = true; + result = result && (hasLoaded() == other.hasLoaded()); + if (hasLoaded()) { + result = result && (getLoaded() + == other.getLoaded()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLoaded()) { + hash = (37 * hash) + LOADED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLoaded()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder 
extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + loaded_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.loaded_ = loaded_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; + if 
(other.hasLoaded()) { + setLoaded(other.getLoaded()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLoaded()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + loaded_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool loaded = 1; + private boolean loaded_ ; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + public Builder setLoaded(boolean value) { + bitField0_ |= 0x00000001; + loaded_ = value; + onChanged(); + return this; + } + public Builder clearLoaded() { + bitField0_ = (bitField0_ & ~0x00000001); + loaded_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse) + } + + static { + defaultInstance = new BulkLoadHFileResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) + } + + public interface ExecOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required string protocolName = 2; + boolean hasProtocolName(); + String getProtocolName(); + + // required string methodName = 3; + boolean hasMethodName(); + String getMethodName(); + + // repeated .NameStringPair property = 4; + java.util.List + getPropertyList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index); + int getPropertyCount(); + java.util.List + getPropertyOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( + int index); + + // repeated .NameBytesPair parameter = 5; + java.util.List + getParameterList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index); + int getParameterCount(); + java.util.List + getParameterOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( + int index); + } + public static final class Exec extends + com.google.protobuf.GeneratedMessage + implements ExecOrBuilder { + // Use Exec.newBuilder() to construct. 
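/*
 * Illustrative usage sketch, not part of the generated patch: the required
 * 'loaded' flag must be set before build() succeeds, and a server would set
 * it to report whether the HFiles were committed. Names below are placeholders.
 */
class BulkLoadHFileResponseCheck {
  public static void main(String[] args) throws Exception {
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse resp =
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse
            .newBuilder()
            .setLoaded(true)   // required bool loaded = 1; build() throws if unset
            .build();
    if (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse
        .parseFrom(resp.toByteArray()).getLoaded()) {
      System.out.println("bulk load committed");
    }
  }
}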
+ private Exec(Builder builder) { + super(builder); + } + private Exec(boolean noInit) {} + + private static final Exec defaultInstance; + public static Exec getDefaultInstance() { + return defaultInstance; + } + + public Exec getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required string protocolName = 2; + public static final int PROTOCOLNAME_FIELD_NUMBER = 2; + private java.lang.Object protocolName_; + public boolean hasProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocolName() { + java.lang.Object ref = protocolName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + protocolName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getProtocolNameBytes() { + java.lang.Object ref = protocolName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + protocolName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string methodName = 3; + public static final int METHODNAME_FIELD_NUMBER = 3; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + methodName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // repeated .NameStringPair property = 4; + public static final int PROPERTY_FIELD_NUMBER = 4; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> property_; + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getPropertyList() { + return property_; + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> + getPropertyOrBuilderList() { + return property_; + } + public int getPropertyCount() { + return property_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { + return property_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( + int index) { + return property_.get(index); + } + + // repeated .NameBytesPair parameter = 5; + public static final int PARAMETER_FIELD_NUMBER = 5; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> parameter_; + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getParameterList() { + return parameter_; + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getParameterOrBuilderList() { + return parameter_; + } + public int getParameterCount() { + return parameter_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { + return parameter_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( + int index) { + return parameter_.get(index); + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + protocolName_ = ""; + methodName_ = ""; + property_ = java.util.Collections.emptyList(); + parameter_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasProtocolName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getPropertyCount(); i++) { + if (!getProperty(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getParameterCount(); i++) { + if (!getParameter(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMethodNameBytes()); + } + for (int i = 0; i < property_.size(); i++) { + output.writeMessage(4, property_.get(i)); + } + for (int i = 0; i < parameter_.size(); i++) { + output.writeMessage(5, parameter_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMethodNameBytes()); + } + for (int i = 0; i < property_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, property_.get(i)); + } + for (int i = 0; i < parameter_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, parameter_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasProtocolName() == other.hasProtocolName()); + if (hasProtocolName()) { + result = result && getProtocolName() + .equals(other.getProtocolName()); + } + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && getPropertyList() + .equals(other.getPropertyList()); + result = result && getParameterList() + .equals(other.getParameterList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasProtocolName()) { + hash = (37 * hash) + PROTOCOLNAME_FIELD_NUMBER; + hash = (53 * hash) + getProtocolName().hashCode(); + } + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (getPropertyCount() > 0) { + hash = (37 * hash) + PROPERTY_FIELD_NUMBER; + hash = (53 * hash) + getPropertyList().hashCode(); + } + if (getParameterCount() > 0) { + hash = (37 * hash) + PARAMETER_FIELD_NUMBER; + hash = (53 * hash) + getParameterList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPropertyFieldBuilder(); + getParameterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + protocolName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + if (propertyBuilder_ == null) { + property_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + propertyBuilder_.clear(); + } + if (parameterBuilder_ == null) { + parameter_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + parameterBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.protocolName_ = protocolName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.methodName_ = methodName_; + if (propertyBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + property_ = java.util.Collections.unmodifiableList(property_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.property_ = property_; + } else { + result.property_ = propertyBuilder_.build(); + } + if (parameterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + parameter_ = java.util.Collections.unmodifiableList(parameter_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.parameter_ = parameter_; + } else { + result.parameter_ = parameterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasProtocolName()) { + setProtocolName(other.getProtocolName()); + } + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (propertyBuilder_ == null) { + if (!other.property_.isEmpty()) { + if (property_.isEmpty()) { + property_ = other.property_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensurePropertyIsMutable(); + property_.addAll(other.property_); + } + onChanged(); + } + } else { + if (!other.property_.isEmpty()) { + if (propertyBuilder_.isEmpty()) { + propertyBuilder_.dispose(); + propertyBuilder_ = null; + property_ = other.property_; + bitField0_ = (bitField0_ & ~0x00000008); + propertyBuilder_ = + 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getPropertyFieldBuilder() : null; + } else { + propertyBuilder_.addAllMessages(other.property_); + } + } + } + if (parameterBuilder_ == null) { + if (!other.parameter_.isEmpty()) { + if (parameter_.isEmpty()) { + parameter_ = other.parameter_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureParameterIsMutable(); + parameter_.addAll(other.parameter_); + } + onChanged(); + } + } else { + if (!other.parameter_.isEmpty()) { + if (parameterBuilder_.isEmpty()) { + parameterBuilder_.dispose(); + parameterBuilder_ = null; + parameter_ = other.parameter_; + bitField0_ = (bitField0_ & ~0x00000010); + parameterBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getParameterFieldBuilder() : null; + } else { + parameterBuilder_.addAllMessages(other.parameter_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasProtocolName()) { + + return false; + } + if (!hasMethodName()) { + + return false; + } + for (int i = 0; i < getPropertyCount(); i++) { + if (!getProperty(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getParameterCount(); i++) { + if (!getParameter(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + protocolName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addProperty(subBuilder.buildPartial()); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addParameter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // 
required string protocolName = 2; + private java.lang.Object protocolName_ = ""; + public boolean hasProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocolName() { + java.lang.Object ref = protocolName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + protocolName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setProtocolName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + protocolName_ = value; + onChanged(); + return this; + } + public Builder clearProtocolName() { + bitField0_ = (bitField0_ & ~0x00000002); + protocolName_ = getDefaultInstance().getProtocolName(); + onChanged(); + return this; + } + void setProtocolName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + protocolName_ = value; + onChanged(); + } + + // required string methodName = 3; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000004); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + } + + // repeated .NameStringPair property = 4; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> property_ = + java.util.Collections.emptyList(); + private void ensurePropertyIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + property_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(property_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> propertyBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getPropertyList() { + if (propertyBuilder_ == null) { + return java.util.Collections.unmodifiableList(property_); + } else { + return propertyBuilder_.getMessageList(); + } + } + public int getPropertyCount() { + if (propertyBuilder_ == null) { + return property_.size(); + } else { + return propertyBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { + if (propertyBuilder_ == null) { + return property_.get(index); + } else { + return propertyBuilder_.getMessage(index); + } + } + public Builder setProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.set(index, value); + onChanged(); + } else { + propertyBuilder_.setMessage(index, value); + } + return this; + } + public Builder setProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.set(index, builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.add(value); + onChanged(); + } else { + propertyBuilder_.addMessage(value); + } + return this; + } + public Builder addProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.add(index, value); + onChanged(); + } else { + propertyBuilder_.addMessage(index, value); + } + return this; + } + public Builder addProperty( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.add(builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.add(index, builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllProperty( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + super.addAll(values, property_); + onChanged(); + } else { + propertyBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearProperty() { + if (propertyBuilder_ == null) { + property_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + propertyBuilder_.clear(); + } + return this; + } + public Builder removeProperty(int index) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.remove(index); + onChanged(); + } else { + propertyBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getPropertyBuilder( + int index) { + return getPropertyFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( + int index) { + if (propertyBuilder_ == null) { + return property_.get(index); } else { + return propertyBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> + getPropertyOrBuilderList() { + if (propertyBuilder_ != null) { + return propertyBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(property_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder() { + return getPropertyFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder( + int index) { + return getPropertyFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> + getPropertyBuilderList() { + return getPropertyFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> + getPropertyFieldBuilder() { + if (propertyBuilder_ == null) { + propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( + property_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + property_ = null; + } + return propertyBuilder_; + } + + // repeated .NameBytesPair parameter = 5; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> parameter_ = + java.util.Collections.emptyList(); + private void ensureParameterIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + parameter_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(parameter_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> parameterBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getParameterList() { + if (parameterBuilder_ == null) { + return java.util.Collections.unmodifiableList(parameter_); + } else { + return parameterBuilder_.getMessageList(); + } + } + public int getParameterCount() { + if (parameterBuilder_ == null) { + return parameter_.size(); + } else { + return parameterBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { + if (parameterBuilder_ == null) { + return parameter_.get(index); + } else { + return parameterBuilder_.getMessage(index); + } + } + public Builder setParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.set(index, value); + onChanged(); + } else { + parameterBuilder_.setMessage(index, value); + } + return this; + } + public Builder setParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.set(index, builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.add(value); + onChanged(); + } else { + parameterBuilder_.addMessage(value); + } + return this; + } + public Builder addParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.add(index, value); + onChanged(); + } else { + parameterBuilder_.addMessage(index, value); + } + return this; + } + public Builder addParameter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.add(builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.add(index, builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllParameter( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + super.addAll(values, parameter_); + onChanged(); + } else { + parameterBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearParameter() { + if (parameterBuilder_ == null) { + parameter_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + parameterBuilder_.clear(); + } + return this; + } + public Builder removeParameter(int index) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.remove(index); + onChanged(); + } else { + parameterBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getParameterBuilder( + int index) { + return getParameterFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( + int index) { + if (parameterBuilder_ == null) { + return parameter_.get(index); } else { + return parameterBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getParameterOrBuilderList() { + if (parameterBuilder_ != null) { + return parameterBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(parameter_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder() { + return getParameterFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder( + int index) { + return getParameterFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> + getParameterBuilderList() { + return getParameterFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getParameterFieldBuilder() { + if (parameterBuilder_ == null) { + parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + parameter_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + parameter_ = null; + } + 
return parameterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Exec) + } + + static { + defaultInstance = new Exec(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Exec) + } + + public interface ExecCoprocessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Exec call = 2; + boolean hasCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder(); + } + public static final class ExecCoprocessorRequest extends + com.google.protobuf.GeneratedMessage + implements ExecCoprocessorRequestOrBuilder { + // Use ExecCoprocessorRequest.newBuilder() to construct. + private ExecCoprocessorRequest(Builder builder) { + super(builder); + } + private ExecCoprocessorRequest(boolean noInit) {} + + private static final ExecCoprocessorRequest defaultInstance; + public static ExecCoprocessorRequest getDefaultInstance() { + return defaultInstance; + } + + public ExecCoprocessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Exec call = 2; + public static final int CALL_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { + return call_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { + return call_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCall()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCall().isInitialized()) { + memoizedIsInitialized = 
0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, call_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, call_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasCall() == other.hasCall()); + if (hasCall()) { + result = result && getCall() + .equals(other.getCall()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasCall()) { + hash = (37 * hash) + CALL_FIELD_NUMBER; + hash = (53 * hash) + getCall().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getCallFieldBuilder(); + } + } + private static Builder create() { + return new 
Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (callBuilder_ == null) { + result.call_ = call_; + } else { + result.call_ = callBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasCall()) { + mergeCall(other.getCall()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasCall()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if 
(!getCall().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(); + if (hasCall()) { + subBuilder.mergeFrom(getCall()); + } + input.readMessage(subBuilder, extensionRegistry); + setCall(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if 
(regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Exec call = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> callBuilder_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { + if (callBuilder_ == null) { + return call_; + } else { + return callBuilder_.getMessage(); + } + } + public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { + if (callBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + call_ = value; + onChanged(); + } else { + callBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setCall( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) { + if (callBuilder_ == null) { + call_ = builderForValue.build(); + onChanged(); + } else { + callBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { + if (callBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) { + call_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); + } else { + call_ = value; + } + onChanged(); + } else { + callBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearCall() { + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + onChanged(); + } else { + callBuilder_.clear(); + } + bitField0_ = 
(bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getCallBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getCallFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { + if (callBuilder_ != null) { + return callBuilder_.getMessageOrBuilder(); + } else { + return call_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> + getCallFieldBuilder() { + if (callBuilder_ == null) { + callBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>( + call_, + getParentForChildren(), + isClean()); + call_ = null; + } + return callBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ExecCoprocessorRequest) + } + + static { + defaultInstance = new ExecCoprocessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExecCoprocessorRequest) + } + + public interface ExecCoprocessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .NameBytesPair value = 1; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + } + public static final class ExecCoprocessorResponse extends + com.google.protobuf.GeneratedMessage + implements ExecCoprocessorResponseOrBuilder { + // Use ExecCoprocessorResponse.newBuilder() to construct. 
+ private ExecCoprocessorResponse(Builder builder) { + super(builder); + } + private ExecCoprocessorResponse(boolean noInit) {} + + private static final ExecCoprocessorResponse defaultInstance; + public static ExecCoprocessorResponse getDefaultInstance() { + return defaultInstance; + } + + public ExecCoprocessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .NameBytesPair value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + return value_; + } + + private void initFields() { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() 
{ return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this; + if (other.hasValue()) { + mergeValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasValue()) { + + return false; + } + if (!getValue().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .NameBytesPair value = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + value_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ = null; + } + return valueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ExecCoprocessorResponse) + } + + static { + defaultInstance = new ExecCoprocessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse) + } + + public interface CoprocessorServiceCallOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required string serviceName = 2; + boolean hasServiceName(); + String getServiceName(); + + // required string methodName = 3; + boolean hasMethodName(); + String getMethodName(); + + // required bytes request = 4; + boolean hasRequest(); + com.google.protobuf.ByteString getRequest(); + } + public static final class CoprocessorServiceCall extends + com.google.protobuf.GeneratedMessage + implements CoprocessorServiceCallOrBuilder { + // Use CoprocessorServiceCall.newBuilder() to construct. 
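+ // The .proto definition behind CoprocessorServiceCall is roughly the
+ // following (reconstructed from the generated accessors; field meanings are
+ // inferred, and Client.proto is the authoritative source):
+ //
+ //   message CoprocessorServiceCall {
+ //     required bytes row = 1;          // row key locating the target region
+ //     required string serviceName = 2; // coprocessor service to invoke
+ //     required string methodName = 3;  // method on that service
+ //     required bytes request = 4;      // serialized request message
+ //   }
+ //
+ // A minimal builder sketch (all values hypothetical):
+ //
+ //   CoprocessorServiceCall call = CoprocessorServiceCall.newBuilder()
+ //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
+ //       .setServiceName("MyService")
+ //       .setMethodName("myMethod")
+ //       .setRequest(com.google.protobuf.ByteString.EMPTY)
+ //       .build(); // throws if any of the four required fields is missing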
+ private CoprocessorServiceCall(Builder builder) { + super(builder); + } + private CoprocessorServiceCall(boolean noInit) {} + + private static final CoprocessorServiceCall defaultInstance; + public static CoprocessorServiceCall getDefaultInstance() { + return defaultInstance; + } + + public CoprocessorServiceCall getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required string serviceName = 2; + public static final int SERVICENAME_FIELD_NUMBER = 2; + private java.lang.Object serviceName_; + public boolean hasServiceName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getServiceName() { + java.lang.Object ref = serviceName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + serviceName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getServiceNameBytes() { + java.lang.Object ref = serviceName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + serviceName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string methodName = 3; + public static final int METHODNAME_FIELD_NUMBER = 3; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + methodName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required bytes request = 4; + public static final int REQUEST_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString request_; + public boolean hasRequest() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getRequest() { + return request_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + serviceName_ = ""; + methodName_ = ""; + request_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if 
(isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServiceName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRequest()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getServiceNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, request_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getServiceNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, request_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasServiceName() == other.hasServiceName()); + if (hasServiceName()) { + result = result && getServiceName() + .equals(other.getServiceName()); + } + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && (hasRequest() == other.hasRequest()); + if (hasRequest()) { + result = result && getRequest() + .equals(other.getRequest()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasServiceName()) { + hash = (37 * hash) + SERVICENAME_FIELD_NUMBER; + hash = (53 * hash) + getServiceName().hashCode(); + } + if (hasMethodName()) { + hash = (37 * hash) + 
METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (hasRequest()) { + hash = (37 * hash) + REQUEST_FIELD_NUMBER; + hash = (53 * hash) + getRequest().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) { + return 
newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + serviceName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + request_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } 
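+ // buildPartial copies each presence bit from the builder's bitField0_ into
+ // the message's bitField0_ one flag at a time, while the backing value is
+ // copied unconditionally; an unset field therefore carries its default
+ // ("" or ByteString.EMPTY) with its has-bit clear.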
+ result.serviceName_ = serviceName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.methodName_ = methodName_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.request_ = request_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasServiceName()) { + setServiceName(other.getServiceName()); + } + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (other.hasRequest()) { + setRequest(other.getRequest()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasServiceName()) { + + return false; + } + if (!hasMethodName()) { + + return false; + } + if (!hasRequest()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + serviceName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + request_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required string serviceName = 2; + private java.lang.Object serviceName_ = ""; + public boolean hasServiceName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getServiceName() { + java.lang.Object ref = serviceName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + serviceName_ 
= s; + return s; + } else { + return (String) ref; + } + } + public Builder setServiceName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + serviceName_ = value; + onChanged(); + return this; + } + public Builder clearServiceName() { + bitField0_ = (bitField0_ & ~0x00000002); + serviceName_ = getDefaultInstance().getServiceName(); + onChanged(); + return this; + } + void setServiceName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + serviceName_ = value; + onChanged(); + } + + // required string methodName = 3; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000004); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + } + + // required bytes request = 4; + private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRequest() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getRequest() { + return request_; + } + public Builder setRequest(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + request_ = value; + onChanged(); + return this; + } + public Builder clearRequest() { + bitField0_ = (bitField0_ & ~0x00000008); + request_ = getDefaultInstance().getRequest(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CoprocessorServiceCall) + } + + static { + defaultInstance = new CoprocessorServiceCall(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CoprocessorServiceCall) + } + + public interface CoprocessorServiceRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .CoprocessorServiceCall call = 2; + boolean hasCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder(); + } + public static final class CoprocessorServiceRequest extends + com.google.protobuf.GeneratedMessage + implements CoprocessorServiceRequestOrBuilder { + // Use CoprocessorServiceRequest.newBuilder() to construct. 
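+ // The .proto definition behind CoprocessorServiceRequest is roughly the
+ // following (reconstructed from the generated accessors; Client.proto is the
+ // authoritative source):
+ //
+ //   message CoprocessorServiceRequest {
+ //     required RegionSpecifier region = 1;
+ //     required CoprocessorServiceCall call = 2;
+ //   }
+ //
+ // A minimal builder sketch, assuming region and call are prebuilt messages:
+ //
+ //   CoprocessorServiceRequest req = CoprocessorServiceRequest.newBuilder()
+ //       .setRegion(region)
+ //       .setCall(call)
+ //       .build();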
+ private CoprocessorServiceRequest(Builder builder) { + super(builder); + } + private CoprocessorServiceRequest(boolean noInit) {} + + private static final CoprocessorServiceRequest defaultInstance; + public static CoprocessorServiceRequest getDefaultInstance() { + return defaultInstance; + } + + public CoprocessorServiceRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .CoprocessorServiceCall call = 2; + public static final int CALL_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { + return call_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { + return call_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCall()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCall().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, call_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, call_); + } + size += getUnknownFields().getSerializedSize(); 
+ memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasCall() == other.hasCall()); + if (hasCall()) { + result = result && getCall() + .equals(other.getCall()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasCall()) { + hash = (37 * hash) + CALL_FIELD_NUMBER; + hash = (53 * hash) + getCall().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } 
+ } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getCallFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (callBuilder_ == null) { + result.call_ = call_; + } else { + result.call_ = callBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasCall()) { + mergeCall(other.getCall()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasCall()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getCall().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + 
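+ // tag 10 == (field number 1 << 3) | wire type 2 (length-delimited), i.e.
+ // the region field; case 18 below is field 2 under the same encoding. An
+ // already-set region is merged with the incoming message rather than
+ // replaced.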
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(); + if (hasCall()) { + subBuilder.mergeFrom(getCall()); + } + input.readMessage(subBuilder, extensionRegistry); + setCall(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .CoprocessorServiceCall call = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { + if (callBuilder_ == null) { + return call_; + } else { + return callBuilder_.getMessage(); + } + } + public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { + if (callBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + call_ = value; + onChanged(); + } else { + callBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setCall( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) { + if (callBuilder_ == null) { + call_ = builderForValue.build(); + onChanged(); + } else { + callBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { + if (callBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) { + call_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial(); + } else { + call_ = value; + } + onChanged(); + } else { + callBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearCall() { + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + onChanged(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getCallFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { + if (callBuilder_ != null) { + return callBuilder_.getMessageOrBuilder(); + } else { + 
return call_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> + getCallFieldBuilder() { + if (callBuilder_ == null) { + callBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>( + call_, + getParentForChildren(), + isClean()); + call_ = null; + } + return callBuilder_; + } + + // @@protoc_insertion_point(builder_scope:CoprocessorServiceRequest) + } + + static { + defaultInstance = new CoprocessorServiceRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CoprocessorServiceRequest) + } + + public interface CoprocessorServiceResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .NameBytesPair value = 2; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + } + public static final class CoprocessorServiceResponse extends + com.google.protobuf.GeneratedMessage + implements CoprocessorServiceResponseOrBuilder { + // Use CoprocessorServiceResponse.newBuilder() to construct. 
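+ // The .proto definition behind CoprocessorServiceResponse is roughly the
+ // following (reconstructed from the generated accessors; Client.proto is the
+ // authoritative source):
+ //
+ //   message CoprocessorServiceResponse {
+ //     required RegionSpecifier region = 1;
+ //     required NameBytesPair value = 2;
+ //   }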
+ private CoprocessorServiceResponse(Builder builder) { + super(builder); + } + private CoprocessorServiceResponse(boolean noInit) {} + + private static final CoprocessorServiceResponse defaultInstance; + public static CoprocessorServiceResponse getDefaultInstance() { + return defaultInstance; + } + + public CoprocessorServiceResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .NameBytesPair value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + return value_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = 
size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + 
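+ // Editor's note: mergeDelimitedFrom returns false at end-of-stream, so
+ // parseDelimitedFrom yields null once the stream is exhausted. A caller can
+ // drain a stream of length-prefixed messages like this ('in' is an assumed
+ // java.io.InputStream):
+ //   CoprocessorServiceResponse resp;
+ //   while ((resp = CoprocessorServiceResponse.parseDelimitedFrom(in)) != null) {
+ //     // handle resp
+ //   }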
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasValue()) { + mergeValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasValue()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getValue().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + 
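+ // Editor's note: tag 10 encodes field 1 (region) with wire type 2
+ // (length-delimited). Any region already present is merged into a fresh
+ // sub-builder first, so a second occurrence of this singular message field
+ // merges with, rather than replaces, the earlier one.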
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .NameBytesPair value = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ = null; + } + return valueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:CoprocessorServiceResponse) + } + + static { + defaultInstance = new CoprocessorServiceResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CoprocessorServiceResponse) + } + + public interface MultiActionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Mutate mutate = 1; + boolean hasMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder(); + + // optional .Get get = 2; + boolean hasGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); + + // optional .Exec exec = 3; + boolean hasExec(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder(); + } + public static final class MultiAction extends + com.google.protobuf.GeneratedMessage + implements MultiActionOrBuilder { + // Use MultiAction.newBuilder() to construct. 
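+ // Editor's note (illustrative sketch, not protoc output): all three fields
+ // are optional, so a caller typically sets just the one action it needs;
+ // 'get' stands for a fully-initialized ClientProtos.Get:
+ //   MultiAction action = MultiAction.newBuilder()
+ //       .setGet(get)   // optional .Get get = 2
+ //       .build();
+ //   assert action.hasGet() && !action.hasMutate() && !action.hasExec();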
+ private MultiAction(Builder builder) { + super(builder); + } + private MultiAction(boolean noInit) {} + + private static final MultiAction defaultInstance; + public static MultiAction getDefaultInstance() { + return defaultInstance; + } + + public MultiAction getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable; + } + + private int bitField0_; + // optional .Mutate mutate = 1; + public static final int MUTATE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { + return mutate_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { + return mutate_; + } + + // optional .Get get = 2; + public static final int GET_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { + return get_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { + return get_; + } + + // optional .Exec exec = 3; + public static final int EXEC_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec exec_; + public boolean hasExec() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec() { + return exec_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder() { + return exec_; + } + + private void initFields() { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasMutate()) { + if (!getMutate().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasGet()) { + if (!getGet().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasExec()) { + if (!getExec().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, mutate_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(3, exec_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int 
getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, mutate_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, exec_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) obj; + + boolean result = true; + result = result && (hasMutate() == other.hasMutate()); + if (hasMutate()) { + result = result && getMutate() + .equals(other.getMutate()); + } + result = result && (hasGet() == other.hasGet()); + if (hasGet()) { + result = result && getGet() + .equals(other.getGet()); + } + result = result && (hasExec() == other.hasExec()); + if (hasExec()) { + result = result && getExec() + .equals(other.getExec()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasMutate()) { + hash = (37 * hash) + MUTATE_FIELD_NUMBER; + hash = (53 * hash) + getMutate().hashCode(); + } + if (hasGet()) { + hash = (37 * hash) + GET_FIELD_NUMBER; + hash = (53 * hash) + getGet().hashCode(); + } + if (hasExec()) { + hash = (37 * hash) + EXEC_FIELD_NUMBER; + hash = (53 * hash) + getExec().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(java.io.InputStream input) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMutateFieldBuilder(); + getGetFieldBuilder(); + getExecFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + 
if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + if (execBuilder_ == null) { + exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } else { + execBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (mutateBuilder_ == null) { + result.mutate_ = mutate_; + } else { + result.mutate_ = mutateBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (getBuilder_ == null) { + result.get_ = get_; + } else { + result.get_ = getBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + if (execBuilder_ == null) { + result.exec_ = exec_; + } else { + result.exec_ = execBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()) return this; + if (other.hasMutate()) { + mergeMutate(other.getMutate()); + } + if (other.hasGet()) { + mergeGet(other.getGet()); + } + if (other.hasExec()) { + mergeExec(other.getExec()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasMutate()) { + if (!getMutate().isInitialized()) { + + return false; + } + } + if (hasGet()) { + if (!getGet().isInitialized()) { + + return 
false; + } + } + if (hasExec()) { + if (!getExec().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); + if (hasMutate()) { + subBuilder.mergeFrom(getMutate()); + } + input.readMessage(subBuilder, extensionRegistry); + setMutate(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); + if (hasGet()) { + subBuilder.mergeFrom(getGet()); + } + input.readMessage(subBuilder, extensionRegistry); + setGet(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(); + if (hasExec()) { + subBuilder.mergeFrom(getExec()); + } + input.readMessage(subBuilder, extensionRegistry); + setExec(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // optional .Mutate mutate = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutateBuilder_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { + if (mutateBuilder_ == null) { + return mutate_; + } else { + return mutateBuilder_.getMessage(); + } + } + public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + mutate_ = value; + onChanged(); + } else { + mutateBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setMutate( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { + if (mutateBuilder_ == null) { + mutate_ = builderForValue.build(); + onChanged(); + } else { + mutateBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + mutate_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) { + mutate_ = + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); + } else { + mutate_ = value; + } + onChanged(); + } else { + mutateBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearMutate() { + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + onChanged(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutateBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getMutateFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { + if (mutateBuilder_ != null) { + return mutateBuilder_.getMessageOrBuilder(); + } else { + return mutate_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> + getMutateFieldBuilder() { + if (mutateBuilder_ == null) { + mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( + mutate_, + getParentForChildren(), + isClean()); + mutate_ = null; + } + return mutateBuilder_; + } + + // optional .Get get = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { + if (getBuilder_ == null) { + return get_; + } else { + return getBuilder_.getMessage(); + } + } + public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { + if (getBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + get_ = value; + onChanged(); + } else { + getBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setGet( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { + if (getBuilder_ == null) { + get_ = builderForValue.build(); + onChanged(); + } else { + getBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { + if (getBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { + get_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); + } else { + get_ = value; + } + onChanged(); + } else { + getBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearGet() { + if 
(getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + onChanged(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getGetFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { + if (getBuilder_ != null) { + return getBuilder_.getMessageOrBuilder(); + } else { + return get_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> + getGetFieldBuilder() { + if (getBuilder_ == null) { + getBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( + get_, + getParentForChildren(), + isClean()); + get_ = null; + } + return getBuilder_; + } + + // optional .Exec exec = 3; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> execBuilder_; + public boolean hasExec() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec() { + if (execBuilder_ == null) { + return exec_; + } else { + return execBuilder_.getMessage(); + } + } + public Builder setExec(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { + if (execBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exec_ = value; + onChanged(); + } else { + execBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder setExec( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) { + if (execBuilder_ == null) { + exec_ = builderForValue.build(); + onChanged(); + } else { + execBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder mergeExec(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { + if (execBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + exec_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) { + exec_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(exec_).mergeFrom(value).buildPartial(); + } else { + exec_ = value; + } + onChanged(); + } else { + execBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder clearExec() { + if (execBuilder_ == null) { + exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + onChanged(); + } else { + execBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getExecBuilder() { + 
bitField0_ |= 0x00000004; + onChanged(); + return getExecFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder() { + if (execBuilder_ != null) { + return execBuilder_.getMessageOrBuilder(); + } else { + return exec_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> + getExecFieldBuilder() { + if (execBuilder_ == null) { + execBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>( + exec_, + getParentForChildren(), + isClean()); + exec_ = null; + } + return execBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiAction) + } + + static { + defaultInstance = new MultiAction(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiAction) + } + + public interface ActionResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .NameBytesPair value = 1; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + + // optional .NameBytesPair exception = 2; + boolean hasException(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); + } + public static final class ActionResult extends + com.google.protobuf.GeneratedMessage + implements ActionResultOrBuilder { + // Use ActionResult.newBuilder() to construct. 
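+ // Editor's note (illustrative sketch, not protoc output): both fields are
+ // optional, and a typical result sets one of them; 'result' stands for an
+ // already-parsed ActionResult:
+ //   if (result.hasException()) {
+ //     HBaseProtos.NameBytesPair err = result.getException();  // field 2
+ //   } else if (result.hasValue()) {
+ //     HBaseProtos.NameBytesPair val = result.getValue();      // field 1
+ //   }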
+ private ActionResult(Builder builder) { + super(builder); + } + private ActionResult(boolean noInit) {} + + private static final ActionResult defaultInstance; + public static ActionResult getDefaultInstance() { + return defaultInstance; + } + + public ActionResult getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + } + + private int bitField0_; + // optional .NameBytesPair value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + return value_; + } + + // optional .NameBytesPair exception = 2; + public static final int EXCEPTION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + return exception_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + return exception_; + } + + private void initFields() { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasValue()) { + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, value_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, exception_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, value_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, exception_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasException()) { + hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + getExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()) return this; + if (other.hasValue()) { + mergeValue(other.getValue()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasValue()) { + if (!getValue().isInitialized()) { + + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasException()) { + subBuilder.mergeFrom(getException()); + } + 
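+ // Tag 18 is field 2 (exception) with wire type 2: any exception already
+ // set on this builder was merged into subBuilder above, so repeated
+ // occurrences of the field on the wire accumulate into one NameBytesPair.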
input.readMessage(subBuilder, extensionRegistry); + setException(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // optional .NameBytesPair value = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ 
= null; + } + return valueBuilder_; + } + + // optional .NameBytesPair exception = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + if (exceptionBuilder_ == null) { + return exception_; + } else { + return exceptionBuilder_.getMessage(); + } + } + public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + exception_, + 
getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ActionResult) + } + + static { + defaultInstance = new ActionResult(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ActionResult) + } + + public interface MultiRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated .MultiAction action = 2; + java.util.List + getActionList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index); + int getActionCount(); + java.util.List + getActionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( + int index); + + // optional bool atomic = 3; + boolean hasAtomic(); + boolean getAtomic(); + } + public static final class MultiRequest extends + com.google.protobuf.GeneratedMessage + implements MultiRequestOrBuilder { + // Use MultiRequest.newBuilder() to construct. + private MultiRequest(Builder builder) { + super(builder); + } + private MultiRequest(boolean noInit) {} + + private static final MultiRequest defaultInstance; + public static MultiRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated .MultiAction action = 2; + public static final int ACTION_FIELD_NUMBER = 2; + private java.util.List action_; + public java.util.List getActionList() { + return action_; + } + public java.util.List + getActionOrBuilderList() { + return action_; + } + public int getActionCount() { + return action_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { + return action_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( + int index) { + return action_.get(index); + } + + // optional bool atomic = 3; + public static final int ATOMIC_FIELD_NUMBER = 3; + private boolean atomic_; + public boolean hasAtomic() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getAtomic() { + return atomic_; + } + + private void initFields() { + region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + action_ = java.util.Collections.emptyList(); + atomic_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getActionCount(); i++) { + if (!getAction(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < action_.size(); i++) { + output.writeMessage(2, action_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, atomic_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + for (int i = 0; i < action_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, action_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, atomic_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getActionList() + .equals(other.getActionList()); + result = result && (hasAtomic() == other.hasAtomic()); + if (hasAtomic()) { + result = result && (getAtomic() + == other.getAtomic()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getActionCount() > 0) { + hash = (37 * hash) + ACTION_FIELD_NUMBER; + hash = (53 * hash) + getActionList().hashCode(); + } + if (hasAtomic()) { + hash = (37 * hash) + ATOMIC_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getAtomic()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getActionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (actionBuilder_ == null) { + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + actionBuilder_.clear(); + } + atomic_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (actionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + action_ = java.util.Collections.unmodifiableList(action_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.action_ = action_; + } else { + result.action_ = actionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.atomic_ = atomic_; + result.bitField0_ = to_bitField0_; + 
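+ // The builder's bit layout (region=0x1, action-list-mutable=0x2,
+ // atomic=0x4) differs from the built message's has-bits (region=0x1,
+ // atomic=0x2): the repeated action field needs no has-bit, so the
+ // atomic flag was remapped above before being stored on the message.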
onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (actionBuilder_ == null) { + if (!other.action_.isEmpty()) { + if (action_.isEmpty()) { + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureActionIsMutable(); + action_.addAll(other.action_); + } + onChanged(); + } + } else { + if (!other.action_.isEmpty()) { + if (actionBuilder_.isEmpty()) { + actionBuilder_.dispose(); + actionBuilder_ = null; + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000002); + actionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getActionFieldBuilder() : null; + } else { + actionBuilder_.addAllMessages(other.action_); + } + } + } + if (other.hasAtomic()) { + setAtomic(other.getAtomic()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + for (int i = 0; i < getActionCount(); i++) { + if (!getAction(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAction(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + atomic_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated .MultiAction action = 2; + private java.util.List action_ = + java.util.Collections.emptyList(); + private void ensureActionIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + action_ = new java.util.ArrayList(action_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> actionBuilder_; + + public java.util.List getActionList() { + if (actionBuilder_ == null) { + return java.util.Collections.unmodifiableList(action_); + } else { + return actionBuilder_.getMessageList(); + } + } + public int getActionCount() { + if (actionBuilder_ == null) { + return action_.size(); + } else { + return actionBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { + if (actionBuilder_ == null) { + return action_.get(index); + } else { + return actionBuilder_.getMessage(index); + } + } + public Builder setAction( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { + if (actionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.set(index, value); + onChanged(); + } else { + actionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAction( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.set(index, builderForValue.build()); + onChanged(); + } else { + actionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { + if (actionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.add(value); + onChanged(); + } else { + actionBuilder_.addMessage(value); + } + return this; + } + public Builder addAction( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { + if (actionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.add(index, value); + onChanged(); + } else { + actionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAction( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.add(builderForValue.build()); + onChanged(); + } else { + actionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAction( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.add(index, builderForValue.build()); + onChanged(); + } else { + actionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAction( + java.lang.Iterable values) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + super.addAll(values, action_); + onChanged(); + } else { + actionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAction() { + if (actionBuilder_ == null) { + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + actionBuilder_.clear(); + } + return this; + } + public Builder removeAction(int index) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.remove(index); + onChanged(); + } else { + actionBuilder_.remove(index); + } + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder getActionBuilder( + int index) { + return getActionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( + int index) { + if (actionBuilder_ == null) { + return action_.get(index); + } else { + return actionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> + getActionOrBuilderList() { + if (actionBuilder_ != null) { + return actionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(action_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder() { + return getActionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder( + int index) { + return getActionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder> + getActionBuilderList() { + return getActionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> + getActionFieldBuilder() { + if (actionBuilder_ == null) { + actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder>( + action_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + action_ = null; + } + return actionBuilder_; + } + + // optional bool atomic = 3; + private boolean atomic_ ; + public boolean hasAtomic() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getAtomic() { + return atomic_; + } + public Builder setAtomic(boolean value) { + bitField0_ |= 0x00000004; + atomic_ = value; + onChanged(); + return this; + } + public Builder clearAtomic() { + bitField0_ = (bitField0_ & ~0x00000004); + atomic_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MultiRequest) + } + + static { + defaultInstance = new MultiRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiRequest) + } + + public interface MultiResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .ActionResult result = 1; + java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult> + getResultList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index); + int getResultCount(); + java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> + getResultOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + int index); + } + public static final class MultiResponse extends + com.google.protobuf.GeneratedMessage + implements MultiResponseOrBuilder { + // Use MultiResponse.newBuilder() to construct.
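+ // A minimal consumption sketch (illustrative only; `replyBytes` is an
+ // assumed input, and error handling is elided):
+ //   MultiResponse response = MultiResponse.parseFrom(replyBytes);
+ //   for (ActionResult r : response.getResultList()) {
+ //     if (r.hasException()) {
+ //       // surface r.getException() (a NameBytesPair) to the caller
+ //     } else if (r.hasValue()) {
+ //       // decode r.getValue() for the corresponding action
+ //     }
+ //   }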
+ private MultiResponse(Builder builder) { + super(builder); + } + private MultiResponse(boolean noInit) {} + + private static final MultiResponse defaultInstance; + public static MultiResponse getDefaultInstance() { + return defaultInstance; + } + + public MultiResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; + } + + // repeated .ActionResult result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + public java.util.List getResultList() { + return result_; + } + public java.util.List + getResultOrBuilderList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { + return result_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * 
hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + 
com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this); + int from_bitField0_ = bitField0_; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addResult(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .ActionResult result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> resultBuilder_; + + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + public Builder setResult( + int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllResult( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult> values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder>
+ getResultBuilderList() { + return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiResponse) + } + + static { + defaultInstance = new MultiResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiResponse) + } + + public static abstract class ClientService + implements com.google.protobuf.Service { + protected ClientService() {} + + public interface Interface { + public abstract void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new ClientService() { + @java.lang.Override + public void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { + impl.get(controller, request, done); + } + + @java.lang.Override + public void mutate( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { + impl.mutate(controller, request, done); + } + + @java.lang.Override + public void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { + impl.scan(controller, request, done); + } + + @java.lang.Override + public void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { + impl.lockRow(controller, request, done); + } + + @java.lang.Override + public void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { + impl.unlockRow(controller, request, done); + } + + @java.lang.Override + public void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { + impl.bulkLoadHFile(controller, request, done); + } + + @java.lang.Override + public void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { + impl.execCoprocessor(controller, request, done); + } + + @java.lang.Override + public void execService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done) { + impl.execService(controller, request, done); + } + + @java.lang.Override + public void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { + impl.multi(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request); + case 1: + return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request); + case 2: + return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request); + case 3: + return impl.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request); + case 4: + return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request); + case 5: + return 
impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request); + case 6: + return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request); + case 7: + return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + case 8: + return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void get( 
+ com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + 
return; + case 5: + this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 8: + this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + case 7: + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance())); + } + + public void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance())); + } + + public void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance())); + } + + public void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance())); + } + + public void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance())); + } + + public void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); + } + + public void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance())); + } + + public void execService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); + } + + public void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) + throws com.google.protobuf.ServiceException { + return 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Column_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Column_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Get_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Get_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Result_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Result_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Condition_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Condition_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_ColumnValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_ColumnValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Scan_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Scan_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ScanRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ScanRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ScanResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ScanResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_LockRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LockRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LockRowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LockRowResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnlockRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnlockRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnlockRowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnlockRowResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Exec_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Exec_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExecCoprocessorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExecCoprocessorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExecCoprocessorResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExecCoprocessorResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CoprocessorServiceCall_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CoprocessorServiceCall_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CoprocessorServiceRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CoprocessorServiceRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CoprocessorServiceResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CoprocessorServiceResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiAction_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiAction_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ActionResult_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_ActionResult_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014Client.proto\032\013hbase.proto\032\020Comparator." + + "proto\"+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tquali" + + "fier\030\002 \003(\014\"\362\001\n\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006colum" + + "n\030\002 \003(\0132\007.Column\022!\n\tattribute\030\003 \003(\0132\016.Na" + + "meBytesPair\022\016\n\006lockId\030\004 \001(\004\022\027\n\006filter\030\005 " + + "\001(\0132\007.Filter\022\035\n\ttimeRange\030\006 \001(\0132\n.TimeRa" + + "nge\022\026\n\013maxVersions\030\007 \001(\r:\0011\022\031\n\013cacheBloc" + + "ks\030\010 \001(\010:\004true\022\022\n\nstoreLimit\030\t \001(\r\022\023\n\013st" + + "oreOffset\030\n \001(\r\"\037\n\006Result\022\025\n\rkeyValueByt" + + "es\030\001 \003(\014\"r\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020", + ".RegionSpecifier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020c" + + "losestRowBefore\030\003 \001(\010\022\025\n\rexistenceOnly\030\004" + + " \001(\010\"6\n\013GetResponse\022\027\n\006result\030\001 \001(\0132\007.Re" + + "sult\022\016\n\006exists\030\002 \001(\010\"\177\n\tCondition\022\013\n\003row" + + "\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(" + + "\014\022!\n\013compareType\030\004 \002(\0162\014.CompareType\022\037\n\n" + + "comparator\030\005 \002(\0132\013.Comparator\"\306\004\n\006Mutate" + + "\022\013\n\003row\030\001 \002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Muta" + + "te.MutateType\022(\n\013columnValue\030\003 \003(\0132\023.Mut" + + "ate.ColumnValue\022!\n\tattribute\030\004 \003(\0132\016.Nam", + "eBytesPair\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030" + + "\006 \001(\004\022\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeR" + + "ange\030\n \001(\0132\n.TimeRange\032\310\001\n\013ColumnValue\022\016" + + "\n\006family\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\"" + + ".Mutate.ColumnValue.QualifierValue\032m\n\016Qu" + + "alifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value" + + "\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022&\n\ndeleteType\030" + + "\004 \001(\0162\022.Mutate.DeleteType\"<\n\nMutateType\022" + + "\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006D" + + "ELETE\020\003\"U\n\nDeleteType\022\026\n\022DELETE_ONE_VERS", + "ION\020\000\022\034\n\030DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rD" + + "ELETE_FAMILY\020\002\"i\n\rMutateRequest\022 \n\006regio" + + "n\030\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030\002 \002(" + + "\0132\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Conditio" + + "n\"<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132\007.Re" + + "sult\022\021\n\tprocessed\030\002 
\001(\010\"\243\002\n\004Scan\022\027\n\006colu" + + "mn\030\001 \003(\0132\007.Column\022!\n\tattribute\030\002 \003(\0132\016.N" + + "ameBytesPair\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRo" + + "w\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007.Filter\022\035\n\ttime" + + "Range\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersions\030\007", + " \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022\021\n\tba" + + "tchSize\030\t \001(\r\022\025\n\rmaxResultSize\030\n \001(\004\022\022\n\n" + + "storeLimit\030\013 \001(\r\022\023\n\013storeOffset\030\014 \001(\r\"\230\001" + + "\n\013ScanRequest\022 \n\006region\030\001 \001(\0132\020.RegionSp" + + "ecifier\022\023\n\004scan\030\002 \001(\0132\005.Scan\022\021\n\tscannerI" + + "d\030\003 \001(\004\022\024\n\014numberOfRows\030\004 \001(\r\022\024\n\014closeSc" + + "anner\030\005 \001(\010\022\023\n\013nextCallSeq\030\006 \001(\004\"\\\n\014Scan" + + "Response\022\027\n\006result\030\001 \003(\0132\007.Result\022\021\n\tsca" + + "nnerId\030\002 \001(\004\022\023\n\013moreResults\030\003 \001(\010\022\013\n\003ttl" + + "\030\004 \001(\r\"?\n\016LockRowRequest\022 \n\006region\030\001 \002(\013", + "2\020.RegionSpecifier\022\013\n\003row\030\002 \003(\014\".\n\017LockR" + + "owResponse\022\016\n\006lockId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"" + + "D\n\020UnlockRowRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\016\n\006lockId\030\002 \002(\004\"\023\n\021UnlockR" + + "owResponse\"\260\001\n\024BulkLoadHFileRequest\022 \n\006r" + + "egion\030\001 \002(\0132\020.RegionSpecifier\0224\n\nfamilyP" + + "ath\030\002 \003(\0132 .BulkLoadHFileRequest.FamilyP" + + "ath\022\024\n\014assignSeqNum\030\003 \001(\010\032*\n\nFamilyPath\022" + + "\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoa" + + "dHFileResponse\022\016\n\006loaded\030\001 \002(\010\"\203\001\n\004Exec\022", + "\013\n\003row\030\001 \002(\014\022\024\n\014protocolName\030\002 \002(\t\022\022\n\nme" + + "thodName\030\003 \002(\t\022!\n\010property\030\004 \003(\0132\017.NameS" + + "tringPair\022!\n\tparameter\030\005 \003(\0132\016.NameBytes" + + "Pair\"O\n\026ExecCoprocessorRequest\022 \n\006region" + + "\030\001 \002(\0132\020.RegionSpecifier\022\023\n\004call\030\002 \002(\0132\005" + + ".Exec\"8\n\027ExecCoprocessorResponse\022\035\n\005valu" + + "e\030\001 \002(\0132\016.NameBytesPair\"_\n\026CoprocessorSe" + + "rviceCall\022\013\n\003row\030\001 \002(\014\022\023\n\013serviceName\030\002 " + + "\002(\t\022\022\n\nmethodName\030\003 \002(\t\022\017\n\007request\030\004 \002(\014" + + "\"d\n\031CoprocessorServiceRequest\022 \n\006region\030", + "\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 \002(\0132\027." 
+ + "CoprocessorServiceCall\"]\n\032CoprocessorSer" + + "viceResponse\022 \n\006region\030\001 \002(\0132\020.RegionSpe" + + "cifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesPair\"N\n" + + "\013MultiAction\022\027\n\006mutate\030\001 \001(\0132\007.Mutate\022\021\n" + + "\003get\030\002 \001(\0132\004.Get\022\023\n\004exec\030\003 \001(\0132\005.Exec\"P\n" + + "\014ActionResult\022\035\n\005value\030\001 \001(\0132\016.NameBytes" + + "Pair\022!\n\texception\030\002 \001(\0132\016.NameBytesPair\"" + + "^\n\014MultiRequest\022 \n\006region\030\001 \002(\0132\020.Region" + + "Specifier\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022", + "\016\n\006atomic\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006resu" + + "lt\030\001 \003(\0132\r.ActionResult2\331\003\n\rClientServic" + + "e\022 \n\003get\022\013.GetRequest\032\014.GetResponse\022)\n\006m" + + "utate\022\016.MutateRequest\032\017.MutateResponse\022#" + + "\n\004scan\022\014.ScanRequest\032\r.ScanResponse\022,\n\007l" + + "ockRow\022\017.LockRowRequest\032\020.LockRowRespons" + + "e\0222\n\tunlockRow\022\021.UnlockRowRequest\032\022.Unlo" + + "ckRowResponse\022>\n\rbulkLoadHFile\022\025.BulkLoa" + + "dHFileRequest\032\026.BulkLoadHFileResponse\022D\n" + + "\017execCoprocessor\022\027.ExecCoprocessorReques", + "t\032\030.ExecCoprocessorResponse\022F\n\013execServi" + + "ce\022\032.CoprocessorServiceRequest\032\033.Coproce" + + "ssorServiceResponse\022&\n\005multi\022\r.MultiRequ" + + "est\032\016.MultiResponseBB\n*org.apache.hadoop" + + ".hbase.protobuf.generatedB\014ClientProtosH" + + "\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_Column_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Column_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Column_descriptor, + new java.lang.String[] { "Family", "Qualifier", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); + internal_static_Get_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Get_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Get_descriptor, + new java.lang.String[] { "Row", "Column", "Attribute", "LockId", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); + internal_static_Result_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_Result_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Result_descriptor, + new java.lang.String[] { "KeyValueBytes", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); + internal_static_GetRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GetRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_GetRequest_descriptor, + new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); + internal_static_GetResponse_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GetResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetResponse_descriptor, + new java.lang.String[] { "Result", "Exists", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); + internal_static_Condition_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_Condition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Condition_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); + internal_static_Mutate_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_Mutate_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_descriptor, + new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Attribute", "Timestamp", "LockId", "WriteToWAL", "TimeRange", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder.class); + internal_static_Mutate_ColumnValue_descriptor = + internal_static_Mutate_descriptor.getNestedTypes().get(0); + internal_static_Mutate_ColumnValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_ColumnValue_descriptor, + new java.lang.String[] { "Family", "QualifierValue", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder.class); + internal_static_Mutate_ColumnValue_QualifierValue_descriptor = + internal_static_Mutate_ColumnValue_descriptor.getNestedTypes().get(0); + internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_ColumnValue_QualifierValue_descriptor, + new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); + internal_static_MutateRequest_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_MutateRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateRequest_descriptor, + new java.lang.String[] { "Region", "Mutate", "Condition", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); + internal_static_MutateResponse_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_MutateResponse_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateResponse_descriptor, + new java.lang.String[] { "Result", "Processed", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); + internal_static_Scan_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_Scan_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Scan_descriptor, + new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); + internal_static_ScanRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_ScanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ScanRequest_descriptor, + new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); + internal_static_ScanResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_ScanResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ScanResponse_descriptor, + new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); + internal_static_LockRowRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_LockRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LockRowRequest_descriptor, + new java.lang.String[] { "Region", "Row", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.Builder.class); + internal_static_LockRowResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_LockRowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LockRowResponse_descriptor, + new java.lang.String[] { "LockId", "Ttl", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.Builder.class); + internal_static_UnlockRowRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_UnlockRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnlockRowRequest_descriptor, + new java.lang.String[] { "Region", "LockId", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.Builder.class); + internal_static_UnlockRowResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_UnlockRowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_UnlockRowResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.Builder.class); + internal_static_BulkLoadHFileRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_BulkLoadHFileRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileRequest_descriptor, + new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = + internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); + internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, + new java.lang.String[] { "Family", "Path", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); + internal_static_BulkLoadHFileResponse_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_BulkLoadHFileResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileResponse_descriptor, + new java.lang.String[] { "Loaded", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); + internal_static_Exec_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_Exec_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Exec_descriptor, + new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder.class); + internal_static_ExecCoprocessorRequest_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_ExecCoprocessorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExecCoprocessorRequest_descriptor, + new java.lang.String[] { "Region", "Call", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.Builder.class); + internal_static_ExecCoprocessorResponse_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_ExecCoprocessorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExecCoprocessorResponse_descriptor, + new java.lang.String[] { "Value", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.Builder.class); + internal_static_CoprocessorServiceCall_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_CoprocessorServiceCall_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CoprocessorServiceCall_descriptor, + new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); + internal_static_CoprocessorServiceRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_CoprocessorServiceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CoprocessorServiceRequest_descriptor, + new java.lang.String[] { "Region", "Call", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); + internal_static_CoprocessorServiceResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_CoprocessorServiceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CoprocessorServiceResponse_descriptor, + new java.lang.String[] { "Region", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); + internal_static_MultiAction_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_MultiAction_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiAction_descriptor, + new java.lang.String[] { "Mutate", "Get", "Exec", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); + internal_static_ActionResult_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_ActionResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ActionResult_descriptor, + new java.lang.String[] { "Value", "Exception", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); + internal_static_MultiRequest_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_MultiRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiRequest_descriptor, + new java.lang.String[] { "Region", "Action", "Atomic", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + internal_static_MultiResponse_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_MultiResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiResponse_descriptor, + new java.lang.String[] { "Result", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java new file mode 100644 index 0000000..aac3b80 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java @@ -0,0 +1,468 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: ClusterId.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ClusterIdProtos { + private ClusterIdProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ClusterIdOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string clusterId = 1; + boolean hasClusterId(); + String getClusterId(); + } + public static final class ClusterId extends + com.google.protobuf.GeneratedMessage + implements ClusterIdOrBuilder { + // Use ClusterId.newBuilder() to construct. + private ClusterId(Builder builder) { + super(builder); + } + private ClusterId(boolean noInit) {} + + private static final ClusterId defaultInstance; + public static ClusterId getDefaultInstance() { + return defaultInstance; + } + + public ClusterId getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable; + } + + private int bitField0_; + // required string clusterId = 1; + public static final int CLUSTERID_FIELD_NUMBER = 1; + private java.lang.Object clusterId_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getClusterId() { + java.lang.Object ref = clusterId_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + clusterId_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getClusterIdBytes() { + java.lang.Object ref = clusterId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + clusterId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + clusterId_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClusterId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getClusterIdBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = 
memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getClusterIdBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) obj; + + boolean result = true; + result = result && (hasClusterId() == other.hasClusterId()); + if (hasClusterId()) { + result = result && getClusterId() + .equals(other.getClusterId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasClusterId()) { + hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; + hash = (53 * hash) + getClusterId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + clusterId_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId build() { + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.clusterId_ = clusterId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this; + if (other.hasClusterId()) { + setClusterId(other.getClusterId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasClusterId()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + clusterId_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string clusterId = 1; + private java.lang.Object clusterId_ = ""; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getClusterId() { + java.lang.Object ref = clusterId_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + clusterId_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setClusterId(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + clusterId_ = value; + onChanged(); + return this; + } + public Builder clearClusterId() { + bitField0_ = (bitField0_ & ~0x00000001); + clusterId_ = getDefaultInstance().getClusterId(); + onChanged(); + return this; + } + void setClusterId(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + clusterId_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:ClusterId) + } + + static { + defaultInstance = new ClusterId(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ClusterId) + } + + private static com.google.protobuf.Descriptors.Descriptor 
+ internal_static_ClusterId_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ClusterId_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\017ClusterId.proto\"\036\n\tClusterId\022\021\n\tcluste" + + "rId\030\001 \002(\tBB\n*org.apache.hadoop.hbase.pro" + + "tobuf.generatedB\017ClusterIdProtosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_ClusterId_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_ClusterId_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ClusterId_descriptor, + new java.lang.String[] { "ClusterId", }, + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java new file mode 100644 index 0000000..a4c8509 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java @@ -0,0 +1,4426 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: ClusterStatus.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ClusterStatusProtos { + private ClusterStatusProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RegionStateOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionInfo regionInfo = 1; + boolean hasRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); + + // required .RegionState.State state = 2; + boolean hasState(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState(); + + // optional uint64 stamp = 3; + boolean hasStamp(); + long getStamp(); + } + public static final class RegionState extends + com.google.protobuf.GeneratedMessage + implements RegionStateOrBuilder { + // Use RegionState.newBuilder() to construct. 
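+    // Illustrative sketch (caller-side code, not part of this generated file):
+    // the newBuilder() entry point mentioned above is how a RegionState is
+    // assembled. `regionInfo` is an assumed, pre-built HBaseProtos.RegionInfo;
+    // the required regionInfo and state fields must be set or build() throws.
+    //
+    //   ClusterStatusProtos.RegionState rs = ClusterStatusProtos.RegionState.newBuilder()
+    //       .setRegionInfo(regionInfo)                               // required
+    //       .setState(ClusterStatusProtos.RegionState.State.OPENING) // required
+    //       .setStamp(System.currentTimeMillis())                    // optional
+    //       .build();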
+ private RegionState(Builder builder) { + super(builder); + } + private RegionState(boolean noInit) {} + + private static final RegionState defaultInstance; + public static RegionState getDefaultInstance() { + return defaultInstance; + } + + public RegionState getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable; + } + + public enum State + implements com.google.protobuf.ProtocolMessageEnum { + OFFLINE(0, 0), + PENDING_OPEN(1, 1), + OPENING(2, 2), + OPEN(3, 3), + PENDING_CLOSE(4, 4), + CLOSING(5, 5), + CLOSED(6, 6), + SPLITTING(7, 7), + SPLIT(8, 8), + ; + + public static final int OFFLINE_VALUE = 0; + public static final int PENDING_OPEN_VALUE = 1; + public static final int OPENING_VALUE = 2; + public static final int OPEN_VALUE = 3; + public static final int PENDING_CLOSE_VALUE = 4; + public static final int CLOSING_VALUE = 5; + public static final int CLOSED_VALUE = 6; + public static final int SPLITTING_VALUE = 7; + public static final int SPLIT_VALUE = 8; + + + public final int getNumber() { return value; } + + public static State valueOf(int value) { + switch (value) { + case 0: return OFFLINE; + case 1: return PENDING_OPEN; + case 2: return OPENING; + case 3: return OPEN; + case 4: return PENDING_CLOSE; + case 5: return CLOSING; + case 6: return CLOSED; + case 7: return SPLITTING; + case 8: return SPLIT; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor().getEnumTypes().get(0); + } + + private static final State[] VALUES = { + OFFLINE, PENDING_OPEN, OPENING, OPEN, PENDING_CLOSE, CLOSING, CLOSED, SPLITTING, SPLIT, + }; + + public static State valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private State(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:RegionState.State) + } + + private int bitField0_; + // required .RegionInfo regionInfo = 1; + public static final int REGIONINFO_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + return regionInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + return regionInfo_; + } + + // required .RegionState.State state = 2; + public static final int STATE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_; + public boolean hasState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { + return state_; + } + + // optional uint64 stamp = 3; + public static final int STAMP_FIELD_NUMBER = 3; + private long stamp_; + public boolean hasStamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getStamp() { + return stamp_; + } + + private void initFields() { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + stamp_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionInfo()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasState()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegionInfo().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, regionInfo_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, state_.getNumber()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, stamp_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionInfo_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, state_.getNumber()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, stamp_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) obj; + + boolean result = true; + result = result && (hasRegionInfo() == other.hasRegionInfo()); + if (hasRegionInfo()) { + result = result && getRegionInfo() + 
.equals(other.getRegionInfo()); + } + result = result && (hasState() == other.hasState()); + if (hasState()) { + result = result && + (getState() == other.getState()); + } + result = result && (hasStamp() == other.hasStamp()); + if (hasStamp()) { + result = result && (getStamp() + == other.getStamp()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionInfo()) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfo().hashCode(); + } + if (hasState()) { + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getState()); + } + if (hasStamp()) { + hash = (37 * hash) + STAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getStamp()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + bitField0_ = (bitField0_ & ~0x00000002); + stamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState build() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionInfoBuilder_ == null) { + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.state_ = state_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.stamp_ = stamp_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) return this; + if (other.hasRegionInfo()) { + mergeRegionInfo(other.getRegionInfo()); + } + if (other.hasState()) { + setState(other.getState()); + } + if (other.hasStamp()) { + setStamp(other.getStamp()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionInfo()) { + + return false; + } + if (!hasState()) { + + return false; + } + if (!getRegionInfo().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + if (hasRegionInfo()) { + subBuilder.mergeFrom(getRegionInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegionInfo(subBuilder.buildPartial()); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + state_ = value; + } + break; 
+ } + case 24: { + bitField0_ |= 0x00000004; + stamp_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionInfo regionInfo = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + if (regionInfoBuilder_ == null) { + return regionInfo_; + } else { + return regionInfoBuilder_.getMessage(); + } + } + public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + regionInfo_ = value; + onChanged(); + } else { + regionInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + regionInfo_ = builderForValue.build(); + onChanged(); + } else { + regionInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { + regionInfo_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); + } else { + regionInfo_ = value; + } + onChanged(); + } else { + regionInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilder(); + } else { + return regionInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // required .RegionState.State state = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + public boolean hasState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { + return state_; + } + public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + state_ = value; + onChanged(); + return this; + } + public Builder clearState() { + bitField0_ = (bitField0_ & ~0x00000002); + state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + onChanged(); + return this; + } + + // optional uint64 stamp = 3; + private long stamp_ ; + public boolean hasStamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getStamp() { + return stamp_; + } + public Builder setStamp(long value) { + bitField0_ |= 0x00000004; + stamp_ = value; + onChanged(); + return this; + } + public Builder clearStamp() { + bitField0_ = (bitField0_ & ~0x00000004); + stamp_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionState) + } + + static { + defaultInstance = new RegionState(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionState) + } + + public interface RegionInTransitionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier spec = 1; + boolean hasSpec(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder(); + + // required .RegionState regionState = 2; + boolean hasRegionState(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder(); + } + public static final class RegionInTransition extends + com.google.protobuf.GeneratedMessage + implements RegionInTransitionOrBuilder { + // Use RegionInTransition.newBuilder() to construct. 
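+    // Illustrative sketch (caller-side code, not generated): both fields of
+    // RegionInTransition are required messages, so a complete RegionSpecifier
+    // and RegionState (here assumed pre-built as `spec` and `state`) must be
+    // supplied before build() will succeed.
+    //
+    //   ClusterStatusProtos.RegionInTransition rit =
+    //       ClusterStatusProtos.RegionInTransition.newBuilder()
+    //           .setSpec(spec)          // required .RegionSpecifier
+    //           .setRegionState(state)  // required .RegionState
+    //           .build();
+    //   byte[] wire = rit.toByteArray();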
+ private RegionInTransition(Builder builder) { + super(builder); + } + private RegionInTransition(boolean noInit) {} + + private static final RegionInTransition defaultInstance; + public static RegionInTransition getDefaultInstance() { + return defaultInstance; + } + + public RegionInTransition getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier spec = 1; + public static final int SPEC_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_; + public boolean hasSpec() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { + return spec_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { + return spec_; + } + + // required .RegionState regionState = 2; + public static final int REGIONSTATE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_; + public boolean hasRegionState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { + return regionState_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { + return regionState_; + } + + private void initFields() { + spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasSpec()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRegionState()) { + memoizedIsInitialized = 0; + return false; + } + if (!getSpec().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegionState().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, spec_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, regionState_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, spec_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, regionState_); + } + size += 
getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) obj; + + boolean result = true; + result = result && (hasSpec() == other.hasSpec()); + if (hasSpec()) { + result = result && getSpec() + .equals(other.getSpec()); + } + result = result && (hasRegionState() == other.hasRegionState()); + if (hasRegionState()) { + result = result && getRegionState() + .equals(other.getRegionState()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSpec()) { + hash = (37 * hash) + SPEC_FIELD_NUMBER; + hash = (53 * hash) + getSpec().hashCode(); + } + if (hasRegionState()) { + hash = (37 * hash) + REGIONSTATE_FIELD_NUMBER; + hash = (53 * hash) + getRegionState().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getSpecFieldBuilder(); + getRegionStateFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (specBuilder_ == null) { + spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + specBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (regionStateBuilder_ == null) { + regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + } else { + regionStateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition build() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (specBuilder_ == null) { + result.spec_ = spec_; + } else { + result.spec_ = specBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (regionStateBuilder_ == null) { + result.regionState_ = regionState_; + } else { + result.regionState_ = regionStateBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()) return this; + if (other.hasSpec()) { + mergeSpec(other.getSpec()); + } + if (other.hasRegionState()) { + mergeRegionState(other.getRegionState()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasSpec()) { + + return false; + } + if (!hasRegionState()) { + + return false; + } + if (!getSpec().isInitialized()) { + + return false; + } + if (!getRegionState().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + 
extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasSpec()) { + subBuilder.mergeFrom(getSpec()); + } + input.readMessage(subBuilder, extensionRegistry); + setSpec(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(); + if (hasRegionState()) { + subBuilder.mergeFrom(getRegionState()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegionState(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier spec = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_; + public boolean hasSpec() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { + if (specBuilder_ == null) { + return spec_; + } else { + return specBuilder_.getMessage(); + } + } + public Builder setSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (specBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + spec_ = value; + onChanged(); + } else { + specBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setSpec( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (specBuilder_ == null) { + spec_ = builderForValue.build(); + onChanged(); + } else { + specBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (specBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + spec_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + spec_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(spec_).mergeFrom(value).buildPartial(); + } else { + spec_ = value; + } + onChanged(); + } else { + specBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearSpec() { + if (specBuilder_ == null) { + spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + specBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getSpecBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getSpecFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { + if (specBuilder_ != null) { + return 
specBuilder_.getMessageOrBuilder(); + } else { + return spec_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getSpecFieldBuilder() { + if (specBuilder_ == null) { + specBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + spec_, + getParentForChildren(), + isClean()); + spec_ = null; + } + return specBuilder_; + } + + // required .RegionState regionState = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_; + public boolean hasRegionState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { + if (regionStateBuilder_ == null) { + return regionState_; + } else { + return regionStateBuilder_.getMessage(); + } + } + public Builder setRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) { + if (regionStateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + regionState_ = value; + onChanged(); + } else { + regionStateBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setRegionState( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder builderForValue) { + if (regionStateBuilder_ == null) { + regionState_ = builderForValue.build(); + onChanged(); + } else { + regionStateBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) { + if (regionStateBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + regionState_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) { + regionState_ = + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(regionState_).mergeFrom(value).buildPartial(); + } else { + regionState_ = value; + } + onChanged(); + } else { + regionStateBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearRegionState() { + if (regionStateBuilder_ == null) { + regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + onChanged(); + } else { + regionStateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder getRegionStateBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getRegionStateFieldBuilder().getBuilder(); + } + 
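+ // Usage sketch (illustrative, not emitted by protoc). Both fields of
+ // RegionInTransition are required, so build() throws an
+ // UninitializedMessageException unless spec and regionState (and their own
+ // required subfields, not shown in this file) are fully populated.
+ // 'regionSpecifier' and 'regionState' below stand in for such instances:
+ //
+ //   RegionInTransition rit = RegionInTransition.newBuilder()
+ //       .setSpec(regionSpecifier)
+ //       .setRegionState(regionState)
+ //       .build();
+ //
+ // For in-place edits of a nested message, getSpecBuilder() above marks the
+ // field as present and returns a child builder wired back to this parent:
+ //
+ //   ritBuilder.getSpecBuilder().mergeFrom(otherSpec);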
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { + if (regionStateBuilder_ != null) { + return regionStateBuilder_.getMessageOrBuilder(); + } else { + return regionState_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> + getRegionStateFieldBuilder() { + if (regionStateBuilder_ == null) { + regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>( + regionState_, + getParentForChildren(), + isClean()); + regionState_ = null; + } + return regionStateBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RegionInTransition) + } + + static { + defaultInstance = new RegionInTransition(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionInTransition) + } + + public interface LiveServerInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName server = 1; + boolean hasServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); + + // required .ServerLoad serverLoad = 2; + boolean hasServerLoad(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder(); + } + public static final class LiveServerInfo extends + com.google.protobuf.GeneratedMessage + implements LiveServerInfoOrBuilder { + // Use LiveServerInfo.newBuilder() to construct. 
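+ // Like every message in this file, LiveServerInfo is immutable: its
+ // constructors are private, instances are created only through the Builder,
+ // and getDefaultInstance() returns one shared instance with all fields at
+ // their defaults.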
+ private LiveServerInfo(Builder builder) { + super(builder); + } + private LiveServerInfo(boolean noInit) {} + + private static final LiveServerInfo defaultInstance; + public static LiveServerInfo getDefaultInstance() { + return defaultInstance; + } + + public LiveServerInfo getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName server = 1; + public static final int SERVER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + return server_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + return server_; + } + + // required .ServerLoad serverLoad = 2; + public static final int SERVERLOAD_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_; + public boolean hasServerLoad() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() { + return serverLoad_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { + return serverLoad_; + } + + private void initFields() { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServer()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerLoad()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServer().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerLoad().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, server_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, serverLoad_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, server_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, serverLoad_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static 
final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) obj; + + boolean result = true; + result = result && (hasServer() == other.hasServer()); + if (hasServer()) { + result = result && getServer() + .equals(other.getServer()); + } + result = result && (hasServerLoad() == other.hasServerLoad()); + if (hasServerLoad()) { + result = result && getServerLoad() + .equals(other.getServerLoad()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServer()) { + hash = (37 * hash) + SERVER_FIELD_NUMBER; + hash = (53 * hash) + getServer().hashCode(); + } + if (hasServerLoad()) { + hash = (37 * hash) + SERVERLOAD_FIELD_NUMBER; + hash = (53 * hash) + getServerLoad().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerFieldBuilder(); + getServerLoadFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (serverLoadBuilder_ == null) { + serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + } else { + serverLoadBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstanceForType() { + 
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo build() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverBuilder_ == null) { + result.server_ = server_; + } else { + result.server_ = serverBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (serverLoadBuilder_ == null) { + result.serverLoad_ = serverLoad_; + } else { + result.serverLoad_ = serverLoadBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()) return this; + if (other.hasServer()) { + mergeServer(other.getServer()); + } + if (other.hasServerLoad()) { + mergeServerLoad(other.getServerLoad()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServer()) { + + return false; + } + if (!hasServerLoad()) { + + return false; + } + if (!getServer().isInitialized()) { + + return false; + } + if (!getServerLoad().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServer()) { + subBuilder.mergeFrom(getServer()); + } + input.readMessage(subBuilder, extensionRegistry); + setServer(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(); + if (hasServerLoad()) { + subBuilder.mergeFrom(getServerLoad()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerLoad(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName server = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + if (serverBuilder_ == null) { + return server_; + } else { + return serverBuilder_.getMessage(); + } + } + public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + server_ = value; + onChanged(); + } else { + serverBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServer( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverBuilder_ == null) { + server_ = builderForValue.build(); + onChanged(); + } else { + serverBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + server_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); + } else { + server_ = value; + } + onChanged(); + } else { + serverBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServer() { + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + if (serverBuilder_ != null) { + return serverBuilder_.getMessageOrBuilder(); + } else { + return server_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerFieldBuilder() { + if (serverBuilder_ == null) { + serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + server_, + getParentForChildren(), + isClean()); + server_ = null; + } + return serverBuilder_; + } + + // required .ServerLoad serverLoad = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> serverLoadBuilder_; + public boolean hasServerLoad() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() { + if (serverLoadBuilder_ == null) { + return serverLoad_; + } else { + return serverLoadBuilder_.getMessage(); + } + } + public Builder setServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { + if (serverLoadBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverLoad_ = value; + onChanged(); + } else { + serverLoadBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setServerLoad( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) { + if (serverLoadBuilder_ == null) { + serverLoad_ = builderForValue.build(); + onChanged(); + } else { + serverLoadBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { + if (serverLoadBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + serverLoad_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) { + serverLoad_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(serverLoad_).mergeFrom(value).buildPartial(); + } else { + serverLoad_ = value; + } + onChanged(); + } else { + serverLoadBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearServerLoad() { + if (serverLoadBuilder_ == null) { + serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + onChanged(); + } else { + serverLoadBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getServerLoadBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getServerLoadFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { + if (serverLoadBuilder_ != null) { + return serverLoadBuilder_.getMessageOrBuilder(); + } else { + return serverLoad_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> + getServerLoadFieldBuilder() { + if (serverLoadBuilder_ == null) { + serverLoadBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder>( + serverLoad_, + getParentForChildren(), + isClean()); + serverLoad_ = null; + } + return serverLoadBuilder_; + } + + // @@protoc_insertion_point(builder_scope:LiveServerInfo) + } + + static { + defaultInstance = new LiveServerInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LiveServerInfo) + } + + public interface ClusterStatusOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .HBaseVersionFileContent hbaseVersion = 1; + boolean hasHbaseVersion(); + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion(); + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder(); + + // repeated .LiveServerInfo liveServers = 2; + java.util.List + getLiveServersList(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index); + int getLiveServersCount(); + java.util.List + getLiveServersOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( + int index); + + // repeated .ServerName deadServers = 3; + java.util.List + getDeadServersList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index); + int getDeadServersCount(); + java.util.List + getDeadServersOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( + int index); + + // repeated .RegionInTransition regionsInTransition = 4; + java.util.List + getRegionsInTransitionList(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index); + int getRegionsInTransitionCount(); + java.util.List + getRegionsInTransitionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( + int index); + + // optional .ClusterId clusterId = 5; + boolean hasClusterId(); + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId(); + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder(); + + // repeated .Coprocessor masterCoprocessors = 6; + java.util.List + getMasterCoprocessorsList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index); + int getMasterCoprocessorsCount(); + java.util.List + getMasterCoprocessorsOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( + int index); + + // optional .ServerName master = 7; + boolean hasMaster(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); + + // repeated .ServerName backupMasters = 8; + java.util.List + getBackupMastersList(); + 
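+ // Usage sketch for the LiveServerInfo message defined above (illustrative,
+ // not emitted by protoc): a message built with both required fields
+ // round-trips through its wire format. 'serverName' and 'serverLoad' stand
+ // in for fully-initialized ServerName and ServerLoad messages:
+ //
+ //   LiveServerInfo info = LiveServerInfo.newBuilder()
+ //       .setServer(serverName)
+ //       .setServerLoad(serverLoad)
+ //       .build();
+ //   byte[] wire = info.toByteArray();
+ //   LiveServerInfo copy = LiveServerInfo.parseFrom(wire);
+ //   assert copy.equals(info);   // field-by-field equals(), defined above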
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index); + int getBackupMastersCount(); + java.util.List + getBackupMastersOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( + int index); + + // optional bool balancerOn = 9; + boolean hasBalancerOn(); + boolean getBalancerOn(); + } + public static final class ClusterStatus extends + com.google.protobuf.GeneratedMessage + implements ClusterStatusOrBuilder { + // Use ClusterStatus.newBuilder() to construct. + private ClusterStatus(Builder builder) { + super(builder); + } + private ClusterStatus(boolean noInit) {} + + private static final ClusterStatus defaultInstance; + public static ClusterStatus getDefaultInstance() { + return defaultInstance; + } + + public ClusterStatus getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable; + } + + private int bitField0_; + // optional .HBaseVersionFileContent hbaseVersion = 1; + public static final int HBASEVERSION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_; + public boolean hasHbaseVersion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { + return hbaseVersion_; + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { + return hbaseVersion_; + } + + // repeated .LiveServerInfo liveServers = 2; + public static final int LIVESERVERS_FIELD_NUMBER = 2; + private java.util.List liveServers_; + public java.util.List getLiveServersList() { + return liveServers_; + } + public java.util.List + getLiveServersOrBuilderList() { + return liveServers_; + } + public int getLiveServersCount() { + return liveServers_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) { + return liveServers_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( + int index) { + return liveServers_.get(index); + } + + // repeated .ServerName deadServers = 3; + public static final int DEADSERVERS_FIELD_NUMBER = 3; + private java.util.List deadServers_; + public java.util.List getDeadServersList() { + return deadServers_; + } + public java.util.List + getDeadServersOrBuilderList() { + return deadServers_; + } + public int getDeadServersCount() { + return deadServers_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) { + return deadServers_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( + int index) { + return deadServers_.get(index); + } + + // repeated .RegionInTransition regionsInTransition = 4; + public static final int REGIONSINTRANSITION_FIELD_NUMBER = 4; + private java.util.List regionsInTransition_; + public java.util.List 
getRegionsInTransitionList() { + return regionsInTransition_; + } + public java.util.List + getRegionsInTransitionOrBuilderList() { + return regionsInTransition_; + } + public int getRegionsInTransitionCount() { + return regionsInTransition_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) { + return regionsInTransition_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( + int index) { + return regionsInTransition_.get(index); + } + + // optional .ClusterId clusterId = 5; + public static final int CLUSTERID_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { + return clusterId_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { + return clusterId_; + } + + // repeated .Coprocessor masterCoprocessors = 6; + public static final int MASTERCOPROCESSORS_FIELD_NUMBER = 6; + private java.util.List masterCoprocessors_; + public java.util.List getMasterCoprocessorsList() { + return masterCoprocessors_; + } + public java.util.List + getMasterCoprocessorsOrBuilderList() { + return masterCoprocessors_; + } + public int getMasterCoprocessorsCount() { + return masterCoprocessors_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) { + return masterCoprocessors_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( + int index) { + return masterCoprocessors_.get(index); + } + + // optional .ServerName master = 7; + public static final int MASTER_FIELD_NUMBER = 7; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_; + public boolean hasMaster() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { + return master_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { + return master_; + } + + // repeated .ServerName backupMasters = 8; + public static final int BACKUPMASTERS_FIELD_NUMBER = 8; + private java.util.List backupMasters_; + public java.util.List getBackupMastersList() { + return backupMasters_; + } + public java.util.List + getBackupMastersOrBuilderList() { + return backupMasters_; + } + public int getBackupMastersCount() { + return backupMasters_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) { + return backupMasters_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( + int index) { + return backupMasters_.get(index); + } + + // optional bool balancerOn = 9; + public static final int BALANCERON_FIELD_NUMBER = 9; + private boolean balancerOn_; + public boolean hasBalancerOn() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getBalancerOn() { + return balancerOn_; + } + + private void initFields() { + hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + 
liveServers_ = java.util.Collections.emptyList(); + deadServers_ = java.util.Collections.emptyList(); + regionsInTransition_ = java.util.Collections.emptyList(); + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + masterCoprocessors_ = java.util.Collections.emptyList(); + master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + backupMasters_ = java.util.Collections.emptyList(); + balancerOn_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasHbaseVersion()) { + if (!getHbaseVersion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getLiveServersCount(); i++) { + if (!getLiveServers(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getDeadServersCount(); i++) { + if (!getDeadServers(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getRegionsInTransitionCount(); i++) { + if (!getRegionsInTransition(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getMasterCoprocessorsCount(); i++) { + if (!getMasterCoprocessors(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasMaster()) { + if (!getMaster().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getBackupMastersCount(); i++) { + if (!getBackupMasters(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, hbaseVersion_); + } + for (int i = 0; i < liveServers_.size(); i++) { + output.writeMessage(2, liveServers_.get(i)); + } + for (int i = 0; i < deadServers_.size(); i++) { + output.writeMessage(3, deadServers_.get(i)); + } + for (int i = 0; i < regionsInTransition_.size(); i++) { + output.writeMessage(4, regionsInTransition_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(5, clusterId_); + } + for (int i = 0; i < masterCoprocessors_.size(); i++) { + output.writeMessage(6, masterCoprocessors_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(7, master_); + } + for (int i = 0; i < backupMasters_.size(); i++) { + output.writeMessage(8, backupMasters_.get(i)); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(9, balancerOn_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, hbaseVersion_); + } + for (int i = 0; i < liveServers_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, liveServers_.get(i)); + } + for (int i = 0; i < deadServers_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, deadServers_.get(i)); + } 
+ for (int i = 0; i < regionsInTransition_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, regionsInTransition_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, clusterId_); + } + for (int i = 0; i < masterCoprocessors_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, masterCoprocessors_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, master_); + } + for (int i = 0; i < backupMasters_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, backupMasters_.get(i)); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(9, balancerOn_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) obj; + + boolean result = true; + result = result && (hasHbaseVersion() == other.hasHbaseVersion()); + if (hasHbaseVersion()) { + result = result && getHbaseVersion() + .equals(other.getHbaseVersion()); + } + result = result && getLiveServersList() + .equals(other.getLiveServersList()); + result = result && getDeadServersList() + .equals(other.getDeadServersList()); + result = result && getRegionsInTransitionList() + .equals(other.getRegionsInTransitionList()); + result = result && (hasClusterId() == other.hasClusterId()); + if (hasClusterId()) { + result = result && getClusterId() + .equals(other.getClusterId()); + } + result = result && getMasterCoprocessorsList() + .equals(other.getMasterCoprocessorsList()); + result = result && (hasMaster() == other.hasMaster()); + if (hasMaster()) { + result = result && getMaster() + .equals(other.getMaster()); + } + result = result && getBackupMastersList() + .equals(other.getBackupMastersList()); + result = result && (hasBalancerOn() == other.hasBalancerOn()); + if (hasBalancerOn()) { + result = result && (getBalancerOn() + == other.getBalancerOn()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasHbaseVersion()) { + hash = (37 * hash) + HBASEVERSION_FIELD_NUMBER; + hash = (53 * hash) + getHbaseVersion().hashCode(); + } + if (getLiveServersCount() > 0) { + hash = (37 * hash) + LIVESERVERS_FIELD_NUMBER; + hash = (53 * hash) + getLiveServersList().hashCode(); + } + if (getDeadServersCount() > 0) { + hash = (37 * hash) + DEADSERVERS_FIELD_NUMBER; + hash = (53 * hash) + getDeadServersList().hashCode(); + } + if (getRegionsInTransitionCount() > 0) { + hash = (37 * hash) + REGIONSINTRANSITION_FIELD_NUMBER; + hash = (53 * hash) + getRegionsInTransitionList().hashCode(); + } + if 
(hasClusterId()) { + hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; + hash = (53 * hash) + getClusterId().hashCode(); + } + if (getMasterCoprocessorsCount() > 0) { + hash = (37 * hash) + MASTERCOPROCESSORS_FIELD_NUMBER; + hash = (53 * hash) + getMasterCoprocessorsList().hashCode(); + } + if (hasMaster()) { + hash = (37 * hash) + MASTER_FIELD_NUMBER; + hash = (53 * hash) + getMaster().hashCode(); + } + if (getBackupMastersCount() > 0) { + hash = (37 * hash) + BACKUPMASTERS_FIELD_NUMBER; + hash = (53 * hash) + getBackupMastersList().hashCode(); + } + if (hasBalancerOn()) { + hash = (37 * hash) + BALANCERON_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getBalancerOn()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getHbaseVersionFieldBuilder(); + getLiveServersFieldBuilder(); + getDeadServersFieldBuilder(); + getRegionsInTransitionFieldBuilder(); + getClusterIdFieldBuilder(); + getMasterCoprocessorsFieldBuilder(); + getMasterFieldBuilder(); + getBackupMastersFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (hbaseVersionBuilder_ == null) { + hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + } else { + hbaseVersionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (liveServersBuilder_ == null) { + liveServers_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + liveServersBuilder_.clear(); + } + if (deadServersBuilder_ == null) { + deadServers_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + deadServersBuilder_.clear(); + } + if (regionsInTransitionBuilder_ == null) { + regionsInTransition_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + regionsInTransitionBuilder_.clear(); + } + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (masterCoprocessorsBuilder_ == null) { + masterCoprocessors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + } else { + masterCoprocessorsBuilder_.clear(); + } + if (masterBuilder_ == null) { + master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + masterBuilder_.clear(); + } + bitField0_ = 
(bitField0_ & ~0x00000040); + if (backupMastersBuilder_ == null) { + backupMasters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000080); + } else { + backupMastersBuilder_.clear(); + } + balancerOn_ = false; + bitField0_ = (bitField0_ & ~0x00000100); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus build() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (hbaseVersionBuilder_ == null) { + result.hbaseVersion_ = hbaseVersion_; + } else { + result.hbaseVersion_ = hbaseVersionBuilder_.build(); + } + if (liveServersBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + liveServers_ = java.util.Collections.unmodifiableList(liveServers_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.liveServers_ = liveServers_; + } else { + result.liveServers_ = liveServersBuilder_.build(); + } + if (deadServersBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + deadServers_ = java.util.Collections.unmodifiableList(deadServers_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.deadServers_ = deadServers_; + } else { + result.deadServers_ = deadServersBuilder_.build(); + } + if (regionsInTransitionBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + regionsInTransition_ = java.util.Collections.unmodifiableList(regionsInTransition_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.regionsInTransition_ = regionsInTransition_; + } else { + result.regionsInTransition_ = regionsInTransitionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000002; + } + if (clusterIdBuilder_ == null) { + result.clusterId_ = clusterId_; + } else { + result.clusterId_ = clusterIdBuilder_.build(); + } + if (masterCoprocessorsBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { + masterCoprocessors_ = java.util.Collections.unmodifiableList(masterCoprocessors_); + bitField0_ = (bitField0_ & ~0x00000020); + } + 
result.masterCoprocessors_ = masterCoprocessors_; + } else { + result.masterCoprocessors_ = masterCoprocessorsBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000004; + } + if (masterBuilder_ == null) { + result.master_ = master_; + } else { + result.master_ = masterBuilder_.build(); + } + if (backupMastersBuilder_ == null) { + if (((bitField0_ & 0x00000080) == 0x00000080)) { + backupMasters_ = java.util.Collections.unmodifiableList(backupMasters_); + bitField0_ = (bitField0_ & ~0x00000080); + } + result.backupMasters_ = backupMasters_; + } else { + result.backupMasters_ = backupMastersBuilder_.build(); + } + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000008; + } + result.balancerOn_ = balancerOn_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) return this; + if (other.hasHbaseVersion()) { + mergeHbaseVersion(other.getHbaseVersion()); + } + if (liveServersBuilder_ == null) { + if (!other.liveServers_.isEmpty()) { + if (liveServers_.isEmpty()) { + liveServers_ = other.liveServers_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureLiveServersIsMutable(); + liveServers_.addAll(other.liveServers_); + } + onChanged(); + } + } else { + if (!other.liveServers_.isEmpty()) { + if (liveServersBuilder_.isEmpty()) { + liveServersBuilder_.dispose(); + liveServersBuilder_ = null; + liveServers_ = other.liveServers_; + bitField0_ = (bitField0_ & ~0x00000002); + liveServersBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getLiveServersFieldBuilder() : null; + } else { + liveServersBuilder_.addAllMessages(other.liveServers_); + } + } + } + if (deadServersBuilder_ == null) { + if (!other.deadServers_.isEmpty()) { + if (deadServers_.isEmpty()) { + deadServers_ = other.deadServers_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureDeadServersIsMutable(); + deadServers_.addAll(other.deadServers_); + } + onChanged(); + } + } else { + if (!other.deadServers_.isEmpty()) { + if (deadServersBuilder_.isEmpty()) { + deadServersBuilder_.dispose(); + deadServersBuilder_ = null; + deadServers_ = other.deadServers_; + bitField0_ = (bitField0_ & ~0x00000004); + deadServersBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getDeadServersFieldBuilder() : null; + } else { + deadServersBuilder_.addAllMessages(other.deadServers_); + } + } + } + if (regionsInTransitionBuilder_ == null) { + if (!other.regionsInTransition_.isEmpty()) { + if (regionsInTransition_.isEmpty()) { + regionsInTransition_ = other.regionsInTransition_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.addAll(other.regionsInTransition_); + } + onChanged(); + } + } else { + if (!other.regionsInTransition_.isEmpty()) { + if (regionsInTransitionBuilder_.isEmpty()) { + regionsInTransitionBuilder_.dispose(); + regionsInTransitionBuilder_ = null; + regionsInTransition_ = other.regionsInTransition_; + bitField0_ = (bitField0_ & ~0x00000008); + regionsInTransitionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getRegionsInTransitionFieldBuilder() : null; + } else { + regionsInTransitionBuilder_.addAllMessages(other.regionsInTransition_); + } + } + } + if (other.hasClusterId()) { + mergeClusterId(other.getClusterId()); + } + if (masterCoprocessorsBuilder_ == null) { + if (!other.masterCoprocessors_.isEmpty()) { + if (masterCoprocessors_.isEmpty()) { + masterCoprocessors_ = other.masterCoprocessors_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.addAll(other.masterCoprocessors_); + } + onChanged(); + } + } else { + if (!other.masterCoprocessors_.isEmpty()) { + if (masterCoprocessorsBuilder_.isEmpty()) { + masterCoprocessorsBuilder_.dispose(); + masterCoprocessorsBuilder_ = null; + masterCoprocessors_ = other.masterCoprocessors_; + bitField0_ = (bitField0_ & ~0x00000020); + masterCoprocessorsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getMasterCoprocessorsFieldBuilder() : null; + } else { + masterCoprocessorsBuilder_.addAllMessages(other.masterCoprocessors_); + } + } + } + if (other.hasMaster()) { + mergeMaster(other.getMaster()); + } + if (backupMastersBuilder_ == null) { + if (!other.backupMasters_.isEmpty()) { + if (backupMasters_.isEmpty()) { + backupMasters_ = other.backupMasters_; + bitField0_ = (bitField0_ & ~0x00000080); + } else { + ensureBackupMastersIsMutable(); + backupMasters_.addAll(other.backupMasters_); + } + onChanged(); + } + } else { + if (!other.backupMasters_.isEmpty()) { + if (backupMastersBuilder_.isEmpty()) { + backupMastersBuilder_.dispose(); + backupMastersBuilder_ = null; + backupMasters_ = other.backupMasters_; + bitField0_ = (bitField0_ & ~0x00000080); + backupMastersBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getBackupMastersFieldBuilder() : null; + } else { + backupMastersBuilder_.addAllMessages(other.backupMasters_); + } + } + } + if (other.hasBalancerOn()) { + setBalancerOn(other.getBalancerOn()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasHbaseVersion()) { + if (!getHbaseVersion().isInitialized()) { + + return false; + } + } + for (int i = 0; i < getLiveServersCount(); i++) { + if (!getLiveServers(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getDeadServersCount(); i++) { + if (!getDeadServers(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getRegionsInTransitionCount(); i++) { + if (!getRegionsInTransition(i).isInitialized()) { + + return false; + } + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + + return false; + } + } + for (int i = 0; i < getMasterCoprocessorsCount(); i++) { + if (!getMasterCoprocessors(i).isInitialized()) { + + return false; + } + } + if (hasMaster()) { + if (!getMaster().isInitialized()) { + + return false; + } + } + for (int i = 0; i < getBackupMastersCount(); i++) { + if (!getBackupMasters(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(); + if (hasHbaseVersion()) { + subBuilder.mergeFrom(getHbaseVersion()); + } + input.readMessage(subBuilder, extensionRegistry); + setHbaseVersion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addLiveServers(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addDeadServers(subBuilder.buildPartial()); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegionsInTransition(subBuilder.buildPartial()); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(); + if (hasClusterId()) { + subBuilder.mergeFrom(getClusterId()); + } + input.readMessage(subBuilder, extensionRegistry); + 
setClusterId(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addMasterCoprocessors(subBuilder.buildPartial()); + break; + } + case 58: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasMaster()) { + subBuilder.mergeFrom(getMaster()); + } + input.readMessage(subBuilder, extensionRegistry); + setMaster(subBuilder.buildPartial()); + break; + } + case 66: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addBackupMasters(subBuilder.buildPartial()); + break; + } + case 72: { + bitField0_ |= 0x00000100; + balancerOn_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .HBaseVersionFileContent hbaseVersion = 1; + private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> hbaseVersionBuilder_; + public boolean hasHbaseVersion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { + if (hbaseVersionBuilder_ == null) { + return hbaseVersion_; + } else { + return hbaseVersionBuilder_.getMessage(); + } + } + public Builder setHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) { + if (hbaseVersionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + hbaseVersion_ = value; + onChanged(); + } else { + hbaseVersionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setHbaseVersion( + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder builderForValue) { + if (hbaseVersionBuilder_ == null) { + hbaseVersion_ = builderForValue.build(); + onChanged(); + } else { + hbaseVersionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) { + if (hbaseVersionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + hbaseVersion_ != org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) { + hbaseVersion_ = + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(hbaseVersion_).mergeFrom(value).buildPartial(); + } else { + hbaseVersion_ = value; + } + onChanged(); + } else { + hbaseVersionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearHbaseVersion() { + if (hbaseVersionBuilder_ == null) { + hbaseVersion_ = 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + onChanged(); + } else { + hbaseVersionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder getHbaseVersionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getHbaseVersionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { + if (hbaseVersionBuilder_ != null) { + return hbaseVersionBuilder_.getMessageOrBuilder(); + } else { + return hbaseVersion_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> + getHbaseVersionFieldBuilder() { + if (hbaseVersionBuilder_ == null) { + hbaseVersionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder>( + hbaseVersion_, + getParentForChildren(), + isClean()); + hbaseVersion_ = null; + } + return hbaseVersionBuilder_; + } + + // repeated .LiveServerInfo liveServers = 2; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> liveServers_ = + java.util.Collections.emptyList(); + private void ensureLiveServersIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + liveServers_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo>(liveServers_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> liveServersBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> getLiveServersList() { + if (liveServersBuilder_ == null) { + return java.util.Collections.unmodifiableList(liveServers_); + } else { + return liveServersBuilder_.getMessageList(); + } + } + public int getLiveServersCount() { + if (liveServersBuilder_ == null) { + return liveServers_.size(); + } else { + return liveServersBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) { + if (liveServersBuilder_ == null) { + return liveServers_.get(index); + } else { + return liveServersBuilder_.getMessage(index); + } + } + public Builder setLiveServers( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { + if (liveServersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLiveServersIsMutable(); + liveServers_.set(index, value); + onChanged(); + } else { + liveServersBuilder_.setMessage(index, value); + } + return this; + } + public Builder setLiveServers( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { + if (liveServersBuilder_ == null) { + ensureLiveServersIsMutable(); + liveServers_.set(index, builderForValue.build()); + onChanged(); + } else { + liveServersBuilder_.setMessage(index, builderForValue.build()); + } 
+ return this; + } + public Builder addLiveServers(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { + if (liveServersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLiveServersIsMutable(); + liveServers_.add(value); + onChanged(); + } else { + liveServersBuilder_.addMessage(value); + } + return this; + } + public Builder addLiveServers( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { + if (liveServersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLiveServersIsMutable(); + liveServers_.add(index, value); + onChanged(); + } else { + liveServersBuilder_.addMessage(index, value); + } + return this; + } + public Builder addLiveServers( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { + if (liveServersBuilder_ == null) { + ensureLiveServersIsMutable(); + liveServers_.add(builderForValue.build()); + onChanged(); + } else { + liveServersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addLiveServers( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { + if (liveServersBuilder_ == null) { + ensureLiveServersIsMutable(); + liveServers_.add(index, builderForValue.build()); + onChanged(); + } else { + liveServersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllLiveServers( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> values) { + if (liveServersBuilder_ == null) { + ensureLiveServersIsMutable(); + super.addAll(values, liveServers_); + onChanged(); + } else { + liveServersBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearLiveServers() { + if (liveServersBuilder_ == null) { + liveServers_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + liveServersBuilder_.clear(); + } + return this; + } + public Builder removeLiveServers(int index) { + if (liveServersBuilder_ == null) { + ensureLiveServersIsMutable(); + liveServers_.remove(index); + onChanged(); + } else { + liveServersBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder getLiveServersBuilder( + int index) { + return getLiveServersFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( + int index) { + if (liveServersBuilder_ == null) { + return liveServers_.get(index); } else { + return liveServersBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> + getLiveServersOrBuilderList() { + if (liveServersBuilder_ != null) { + return liveServersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(liveServers_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder() { + return getLiveServersFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder( + int index) { + return getLiveServersFieldBuilder().addBuilder( + index, 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder> + getLiveServersBuilderList() { + return getLiveServersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> + getLiveServersFieldBuilder() { + if (liveServersBuilder_ == null) { + liveServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder>( + liveServers_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + liveServers_ = null; + } + return liveServersBuilder_; + } + + // repeated .ServerName deadServers = 3; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> deadServers_ = + java.util.Collections.emptyList(); + private void ensureDeadServersIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + deadServers_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(deadServers_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> deadServersBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getDeadServersList() { + if (deadServersBuilder_ == null) { + return java.util.Collections.unmodifiableList(deadServers_); + } else { + return deadServersBuilder_.getMessageList(); + } + } + public int getDeadServersCount() { + if (deadServersBuilder_ == null) { + return deadServers_.size(); + } else { + return deadServersBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) { + if (deadServersBuilder_ == null) { + return deadServers_.get(index); + } else { + return deadServersBuilder_.getMessage(index); + } + } + public Builder setDeadServers( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (deadServersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDeadServersIsMutable(); + deadServers_.set(index, value); + onChanged(); + } else { + deadServersBuilder_.setMessage(index, value); + } + return this; + } + public Builder setDeadServers( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (deadServersBuilder_ == null) { + ensureDeadServersIsMutable(); + deadServers_.set(index, builderForValue.build()); + onChanged(); + } else { + deadServersBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addDeadServers(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (deadServersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDeadServersIsMutable(); + deadServers_.add(value); + onChanged(); + } else { + deadServersBuilder_.addMessage(value); + } + return this; + } + public Builder addDeadServers( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + 
if (deadServersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDeadServersIsMutable(); + deadServers_.add(index, value); + onChanged(); + } else { + deadServersBuilder_.addMessage(index, value); + } + return this; + } + public Builder addDeadServers( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (deadServersBuilder_ == null) { + ensureDeadServersIsMutable(); + deadServers_.add(builderForValue.build()); + onChanged(); + } else { + deadServersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addDeadServers( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (deadServersBuilder_ == null) { + ensureDeadServersIsMutable(); + deadServers_.add(index, builderForValue.build()); + onChanged(); + } else { + deadServersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllDeadServers( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) { + if (deadServersBuilder_ == null) { + ensureDeadServersIsMutable(); + super.addAll(values, deadServers_); + onChanged(); + } else { + deadServersBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearDeadServers() { + if (deadServersBuilder_ == null) { + deadServers_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + deadServersBuilder_.clear(); + } + return this; + } + public Builder removeDeadServers(int index) { + if (deadServersBuilder_ == null) { + ensureDeadServersIsMutable(); + deadServers_.remove(index); + onChanged(); + } else { + deadServersBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDeadServersBuilder( + int index) { + return getDeadServersFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( + int index) { + if (deadServersBuilder_ == null) { + return deadServers_.get(index); } else { + return deadServersBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getDeadServersOrBuilderList() { + if (deadServersBuilder_ != null) { + return deadServersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(deadServers_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder() { + return getDeadServersFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder( + int index) { + return getDeadServersFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> + getDeadServersBuilderList() { + return getDeadServersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getDeadServersFieldBuilder() { + if (deadServersBuilder_ == null) { + deadServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + deadServers_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + deadServers_ = null; + } + return deadServersBuilder_; + } + + // repeated .RegionInTransition regionsInTransition = 4; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> regionsInTransition_ = + java.util.Collections.emptyList(); + private void ensureRegionsInTransitionIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + regionsInTransition_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition>(regionsInTransition_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> regionsInTransitionBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> getRegionsInTransitionList() { + if (regionsInTransitionBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionsInTransition_); + } else { + return regionsInTransitionBuilder_.getMessageList(); + } + } + public int getRegionsInTransitionCount() { + if (regionsInTransitionBuilder_ == null) { + return regionsInTransition_.size(); + } else { + return regionsInTransitionBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) { + if (regionsInTransitionBuilder_ == null) { + return regionsInTransition_.get(index); + } else { + return regionsInTransitionBuilder_.getMessage(index); + } + } + public Builder setRegionsInTransition( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { + if (regionsInTransitionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.set(index, value); + onChanged(); + } else { + regionsInTransitionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRegionsInTransition( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { + if (regionsInTransitionBuilder_ == null) { + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.set(index, builderForValue.build()); + onChanged(); + } else { + regionsInTransitionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegionsInTransition(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { + if (regionsInTransitionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.add(value); + onChanged(); + } else { + regionsInTransitionBuilder_.addMessage(value); + } + return this; + } + public Builder addRegionsInTransition( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { + if (regionsInTransitionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.add(index, value); + onChanged(); + } else { + regionsInTransitionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegionsInTransition( + 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { + if (regionsInTransitionBuilder_ == null) { + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.add(builderForValue.build()); + onChanged(); + } else { + regionsInTransitionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegionsInTransition( + int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { + if (regionsInTransitionBuilder_ == null) { + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.add(index, builderForValue.build()); + onChanged(); + } else { + regionsInTransitionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegionsInTransition( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> values) { + if (regionsInTransitionBuilder_ == null) { + ensureRegionsInTransitionIsMutable(); + super.addAll(values, regionsInTransition_); + onChanged(); + } else { + regionsInTransitionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegionsInTransition() { + if (regionsInTransitionBuilder_ == null) { + regionsInTransition_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + regionsInTransitionBuilder_.clear(); + } + return this; + } + public Builder removeRegionsInTransition(int index) { + if (regionsInTransitionBuilder_ == null) { + ensureRegionsInTransitionIsMutable(); + regionsInTransition_.remove(index); + onChanged(); + } else { + regionsInTransitionBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder getRegionsInTransitionBuilder( + int index) { + return getRegionsInTransitionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( + int index) { + if (regionsInTransitionBuilder_ == null) { + return regionsInTransition_.get(index); } else { + return regionsInTransitionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> + getRegionsInTransitionOrBuilderList() { + if (regionsInTransitionBuilder_ != null) { + return regionsInTransitionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(regionsInTransition_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder() { + return getRegionsInTransitionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder( + int index) { + return getRegionsInTransitionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder> + getRegionsInTransitionBuilderList() { + return getRegionsInTransitionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> + 
getRegionsInTransitionFieldBuilder() { + if (regionsInTransitionBuilder_ == null) { + regionsInTransitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder>( + regionsInTransition_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + regionsInTransition_ = null; + } + return regionsInTransitionBuilder_; + } + + // optional .ClusterId clusterId = 5; + private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> clusterIdBuilder_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { + if (clusterIdBuilder_ == null) { + return clusterId_; + } else { + return clusterIdBuilder_.getMessage(); + } + } + public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) { + if (clusterIdBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + clusterId_ = value; + onChanged(); + } else { + clusterIdBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setClusterId( + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder builderForValue) { + if (clusterIdBuilder_ == null) { + clusterId_ = builderForValue.build(); + onChanged(); + } else { + clusterIdBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) { + if (clusterIdBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) { + clusterId_ = + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(clusterId_).mergeFrom(value).buildPartial(); + } else { + clusterId_ = value; + } + onChanged(); + } else { + clusterIdBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearClusterId() { + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + onChanged(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder getClusterIdBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getClusterIdFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { + if (clusterIdBuilder_ != null) { + return clusterIdBuilder_.getMessageOrBuilder(); + } else { + return clusterId_; + } + } + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> + getClusterIdFieldBuilder() { + if (clusterIdBuilder_ == null) { + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder>( + clusterId_, + getParentForChildren(), + isClean()); + clusterId_ = null; + } + return clusterIdBuilder_; + } + + // repeated .Coprocessor masterCoprocessors = 6; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> masterCoprocessors_ = + java.util.Collections.emptyList(); + private void ensureMasterCoprocessorsIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + masterCoprocessors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>(masterCoprocessors_); + bitField0_ |= 0x00000020; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> masterCoprocessorsBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getMasterCoprocessorsList() { + if (masterCoprocessorsBuilder_ == null) { + return java.util.Collections.unmodifiableList(masterCoprocessors_); + } else { + return masterCoprocessorsBuilder_.getMessageList(); + } + } + public int getMasterCoprocessorsCount() { + if (masterCoprocessorsBuilder_ == null) { + return masterCoprocessors_.size(); + } else { + return masterCoprocessorsBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) { + if (masterCoprocessorsBuilder_ == null) { + return masterCoprocessors_.get(index); + } else { + return masterCoprocessorsBuilder_.getMessage(index); + } + } + public Builder setMasterCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (masterCoprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.set(index, value); + onChanged(); + } else { + masterCoprocessorsBuilder_.setMessage(index, value); + } + return this; + } + public Builder setMasterCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (masterCoprocessorsBuilder_ == null) { + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.set(index, builderForValue.build()); + onChanged(); + } else { + masterCoprocessorsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addMasterCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (masterCoprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.add(value); + onChanged(); + } else { + masterCoprocessorsBuilder_.addMessage(value); + } + return this; + } + public Builder addMasterCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (masterCoprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + 
ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.add(index, value); + onChanged(); + } else { + masterCoprocessorsBuilder_.addMessage(index, value); + } + return this; + } + public Builder addMasterCoprocessors( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (masterCoprocessorsBuilder_ == null) { + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.add(builderForValue.build()); + onChanged(); + } else { + masterCoprocessorsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addMasterCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (masterCoprocessorsBuilder_ == null) { + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.add(index, builderForValue.build()); + onChanged(); + } else { + masterCoprocessorsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllMasterCoprocessors( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> values) { + if (masterCoprocessorsBuilder_ == null) { + ensureMasterCoprocessorsIsMutable(); + super.addAll(values, masterCoprocessors_); + onChanged(); + } else { + masterCoprocessorsBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearMasterCoprocessors() { + if (masterCoprocessorsBuilder_ == null) { + masterCoprocessors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + masterCoprocessorsBuilder_.clear(); + } + return this; + } + public Builder removeMasterCoprocessors(int index) { + if (masterCoprocessorsBuilder_ == null) { + ensureMasterCoprocessorsIsMutable(); + masterCoprocessors_.remove(index); + onChanged(); + } else { + masterCoprocessorsBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getMasterCoprocessorsBuilder( + int index) { + return getMasterCoprocessorsFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( + int index) { + if (masterCoprocessorsBuilder_ == null) { + return masterCoprocessors_.get(index); } else { + return masterCoprocessorsBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> + getMasterCoprocessorsOrBuilderList() { + if (masterCoprocessorsBuilder_ != null) { + return masterCoprocessorsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(masterCoprocessors_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder() { + return getMasterCoprocessorsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder( + int index) { + return getMasterCoprocessorsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder> + getMasterCoprocessorsBuilderList() { + return getMasterCoprocessorsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> + 
getMasterCoprocessorsFieldBuilder() { + if (masterCoprocessorsBuilder_ == null) { + masterCoprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( + masterCoprocessors_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + masterCoprocessors_ = null; + } + return masterCoprocessorsBuilder_; + } + + // optional .ServerName master = 7; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; + public boolean hasMaster() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { + if (masterBuilder_ == null) { + return master_; + } else { + return masterBuilder_.getMessage(); + } + } + public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (masterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + master_ = value; + onChanged(); + } else { + masterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000040; + return this; + } + public Builder setMaster( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (masterBuilder_ == null) { + master_ = builderForValue.build(); + onChanged(); + } else { + masterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000040; + return this; + } + public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (masterBuilder_ == null) { + if (((bitField0_ & 0x00000040) == 0x00000040) && + master_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + master_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); + } else { + master_ = value; + } + onChanged(); + } else { + masterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000040; + return this; + } + public Builder clearMaster() { + if (masterBuilder_ == null) { + master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + masterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000040); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { + bitField0_ |= 0x00000040; + onChanged(); + return getMasterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { + if (masterBuilder_ != null) { + return masterBuilder_.getMessageOrBuilder(); + } else { + return master_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + 
getMasterFieldBuilder() { + if (masterBuilder_ == null) { + masterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + master_, + getParentForChildren(), + isClean()); + master_ = null; + } + return masterBuilder_; + } + + // repeated .ServerName backupMasters = 8; + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> backupMasters_ = + java.util.Collections.emptyList(); + private void ensureBackupMastersIsMutable() { + if (!((bitField0_ & 0x00000080) == 0x00000080)) { + backupMasters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(backupMasters_); + bitField0_ |= 0x00000080; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> backupMastersBuilder_; + + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getBackupMastersList() { + if (backupMastersBuilder_ == null) { + return java.util.Collections.unmodifiableList(backupMasters_); + } else { + return backupMastersBuilder_.getMessageList(); + } + } + public int getBackupMastersCount() { + if (backupMastersBuilder_ == null) { + return backupMasters_.size(); + } else { + return backupMastersBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) { + if (backupMastersBuilder_ == null) { + return backupMasters_.get(index); + } else { + return backupMastersBuilder_.getMessage(index); + } + } + public Builder setBackupMasters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (backupMastersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureBackupMastersIsMutable(); + backupMasters_.set(index, value); + onChanged(); + } else { + backupMastersBuilder_.setMessage(index, value); + } + return this; + } + public Builder setBackupMasters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (backupMastersBuilder_ == null) { + ensureBackupMastersIsMutable(); + backupMasters_.set(index, builderForValue.build()); + onChanged(); + } else { + backupMastersBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addBackupMasters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (backupMastersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureBackupMastersIsMutable(); + backupMasters_.add(value); + onChanged(); + } else { + backupMastersBuilder_.addMessage(value); + } + return this; + } + public Builder addBackupMasters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (backupMastersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureBackupMastersIsMutable(); + backupMasters_.add(index, value); + onChanged(); + } else { + backupMastersBuilder_.addMessage(index, value); + } + return this; + } + public Builder addBackupMasters( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (backupMastersBuilder_ == null) { + ensureBackupMastersIsMutable(); + backupMasters_.add(builderForValue.build()); + onChanged(); + } else { + 
backupMastersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addBackupMasters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (backupMastersBuilder_ == null) { + ensureBackupMastersIsMutable(); + backupMasters_.add(index, builderForValue.build()); + onChanged(); + } else { + backupMastersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllBackupMasters( + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) { + if (backupMastersBuilder_ == null) { + ensureBackupMastersIsMutable(); + super.addAll(values, backupMasters_); + onChanged(); + } else { + backupMastersBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearBackupMasters() { + if (backupMastersBuilder_ == null) { + backupMasters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000080); + onChanged(); + } else { + backupMastersBuilder_.clear(); + } + return this; + } + public Builder removeBackupMasters(int index) { + if (backupMastersBuilder_ == null) { + ensureBackupMastersIsMutable(); + backupMasters_.remove(index); + onChanged(); + } else { + backupMastersBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getBackupMastersBuilder( + int index) { + return getBackupMastersFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( + int index) { + if (backupMastersBuilder_ == null) { + return backupMasters_.get(index); } else { + return backupMastersBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getBackupMastersOrBuilderList() { + if (backupMastersBuilder_ != null) { + return backupMastersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(backupMasters_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder() { + return getBackupMastersFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder( + int index) { + return getBackupMastersFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); + } + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> + getBackupMastersBuilderList() { + return getBackupMastersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getBackupMastersFieldBuilder() { + if (backupMastersBuilder_ == null) { + backupMastersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + backupMasters_, + ((bitField0_ & 0x00000080) == 0x00000080), + getParentForChildren(), + isClean()); + backupMasters_ = null; + } + return backupMastersBuilder_; + } + + // optional bool balancerOn = 9; + private boolean balancerOn_ ; + public boolean hasBalancerOn() { + return ((bitField0_ & 0x00000100) 
== 0x00000100); + } + public boolean getBalancerOn() { + return balancerOn_; + } + public Builder setBalancerOn(boolean value) { + bitField0_ |= 0x00000100; + balancerOn_ = value; + onChanged(); + return this; + } + public Builder clearBalancerOn() { + bitField0_ = (bitField0_ & ~0x00000100); + balancerOn_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ClusterStatus) + } + + static { + defaultInstance = new ClusterStatus(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ClusterStatus) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionState_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionState_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionInTransition_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionInTransition_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LiveServerInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LiveServerInfo_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ClusterStatus_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ClusterStatus_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\023ClusterStatus.proto\032\013hbase.proto\032\017Clus" + + "terId.proto\032\010FS.proto\"\346\001\n\013RegionState\022\037\n" + + "\nregionInfo\030\001 \002(\0132\013.RegionInfo\022!\n\005state\030" + + "\002 \002(\0162\022.RegionState.State\022\r\n\005stamp\030\003 \001(\004" + + "\"\203\001\n\005State\022\013\n\007OFFLINE\020\000\022\020\n\014PENDING_OPEN\020" + + "\001\022\013\n\007OPENING\020\002\022\010\n\004OPEN\020\003\022\021\n\rPENDING_CLOS" + + "E\020\004\022\013\n\007CLOSING\020\005\022\n\n\006CLOSED\020\006\022\r\n\tSPLITTIN" + + "G\020\007\022\t\n\005SPLIT\020\010\"W\n\022RegionInTransition\022\036\n\004" + + "spec\030\001 \002(\0132\020.RegionSpecifier\022!\n\013regionSt" + + "ate\030\002 \002(\0132\014.RegionState\"N\n\016LiveServerInf", + "o\022\033\n\006server\030\001 \002(\0132\013.ServerName\022\037\n\nserver" + + "Load\030\002 \002(\0132\013.ServerLoad\"\327\002\n\rClusterStatu" + + "s\022.\n\014hbaseVersion\030\001 \001(\0132\030.HBaseVersionFi" + + "leContent\022$\n\013liveServers\030\002 \003(\0132\017.LiveSer" + + "verInfo\022 \n\013deadServers\030\003 \003(\0132\013.ServerNam" + + "e\0220\n\023regionsInTransition\030\004 \003(\0132\023.RegionI" + + "nTransition\022\035\n\tclusterId\030\005 \001(\0132\n.Cluster" + + "Id\022(\n\022masterCoprocessors\030\006 \003(\0132\014.Coproce" + + "ssor\022\033\n\006master\030\007 \001(\0132\013.ServerName\022\"\n\rbac" + + "kupMasters\030\010 \003(\0132\013.ServerName\022\022\n\nbalance", + "rOn\030\t \001(\010BF\n*org.apache.hadoop.hbase.pro" + + "tobuf.generatedB\023ClusterStatusProtosH\001\240\001" + + "\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public 
com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RegionState_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RegionState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionState_descriptor, + new java.lang.String[] { "RegionInfo", "State", "Stamp", }, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); + internal_static_RegionInTransition_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_RegionInTransition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionInTransition_descriptor, + new java.lang.String[] { "Spec", "RegionState", }, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); + internal_static_LiveServerInfo_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_LiveServerInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LiveServerInfo_descriptor, + new java.lang.String[] { "Server", "ServerLoad", }, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class); + internal_static_ClusterStatus_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_ClusterStatus_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ClusterStatus_descriptor, + new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", "BackupMasters", "BalancerOn", }, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.class, + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java new file mode 100644 index 0000000..983b57e --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java @@ -0,0 +1,3881 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: Comparator.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ComparatorProtos { + private ComparatorProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // optional bytes serializedComparator = 2; + boolean hasSerializedComparator(); + com.google.protobuf.ByteString getSerializedComparator(); + } + public static final class Comparator extends + com.google.protobuf.GeneratedMessage + implements ComparatorOrBuilder { + // Use Comparator.newBuilder() to construct. + private Comparator(Builder builder) { + super(builder); + } + private Comparator(boolean noInit) {} + + private static final Comparator defaultInstance; + public static Comparator getDefaultInstance() { + return defaultInstance; + } + + public Comparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes serializedComparator = 2; + public static final int SERIALIZEDCOMPARATOR_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString serializedComparator_; + public boolean hasSerializedComparator() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSerializedComparator() { + return serializedComparator_; + } + + private void initFields() { + name_ = ""; + serializedComparator_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, serializedComparator_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + 
public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, serializedComparator_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasSerializedComparator() == other.hasSerializedComparator()); + if (hasSerializedComparator()) { + result = result && getSerializedComparator() + .equals(other.getSerializedComparator()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasSerializedComparator()) { + hash = (37 * hash) + SERIALIZEDCOMPARATOR_FIELD_NUMBER; + hash = (53 * hash) + getSerializedComparator().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + serializedComparator_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDescriptor(); + } + + public
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.serializedComparator_ = serializedComparator_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasSerializedComparator()) { + setSerializedComparator(other.getSerializedComparator()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + serializedComparator_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref 
instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // optional bytes serializedComparator = 2; + private com.google.protobuf.ByteString serializedComparator_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSerializedComparator() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSerializedComparator() { + return serializedComparator_; + } + public Builder setSerializedComparator(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + serializedComparator_ = value; + onChanged(); + return this; + } + public Builder clearSerializedComparator() { + bitField0_ = (bitField0_ & ~0x00000002); + serializedComparator_ = getDefaultInstance().getSerializedComparator(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Comparator) + } + + static { + defaultInstance = new Comparator(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Comparator) + } + + public interface ByteArrayComparableOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes value = 1; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class ByteArrayComparable extends + com.google.protobuf.GeneratedMessage + implements ByteArrayComparableOrBuilder { + // Use ByteArrayComparable.newBuilder() to construct. 
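Usage sketch (editorial illustration, not protoc output) for the Comparator message whose generated API closes above; the name string is an arbitrary placeholder, and build() throws the exception produced by newUninitializedMessageException(result) if the required name field were left unset:

    // assumes: import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
    ComparatorProtos.Comparator c = ComparatorProtos.Comparator.newBuilder()
        .setName("example.MyComparator")          // required string name = 1 (placeholder value)
        .setSerializedComparator(                 // optional bytes serializedComparator = 2
            com.google.protobuf.ByteString.copyFromUtf8("state"))
        .build();                                 // would throw if name were unset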
+ private ByteArrayComparable(Builder builder) { + super(builder); + } + private ByteArrayComparable(boolean noInit) {} + + private static final ByteArrayComparable defaultInstance; + public static ByteArrayComparable getDefaultInstance() { + return defaultInstance; + } + + public ByteArrayComparable getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder { + public static final
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + 
public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes value = 1; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ByteArrayComparable) + } + + static { + defaultInstance = new ByteArrayComparable(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ByteArrayComparable) + } + + public interface BinaryComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ByteArrayComparable comparable = 1; + boolean hasComparable(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); + } + public static final class BinaryComparator extends + com.google.protobuf.GeneratedMessage + implements BinaryComparatorOrBuilder { + // Use BinaryComparator.newBuilder() to construct. 
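Wire round-trip sketch for the ByteArrayComparable message just closed above; every call is either generated in this file or inherited from com.google.protobuf.GeneratedMessage (toByteArray):

    ComparatorProtos.ByteArrayComparable original =
        ComparatorProtos.ByteArrayComparable.newBuilder()
            .setValue(com.google.protobuf.ByteString.copyFrom(new byte[] { 1, 2, 3 }))
            .build();                             // no required fields, so always initialized
    byte[] wire = original.toByteArray();
    ComparatorProtos.ByteArrayComparable copy =
        ComparatorProtos.ByteArrayComparable.parseFrom(wire);
    assert copy.equals(original);                 // uses the field-wise equals() generated above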
+ private BinaryComparator(Builder builder) { + super(builder); + } + private BinaryComparator(boolean noInit) {} + + private static final BinaryComparator defaultInstance; + public static BinaryComparator getDefaultInstance() { + return defaultInstance; + } + + public BinaryComparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable; + } + + private int bitField0_; + // required .ByteArrayComparable comparable = 1; + public static final int COMPARABLE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + return comparable_; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + return comparable_; + } + + private void initFields() { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasComparable()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, comparable_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, comparable_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) obj; + + boolean result = true; + result = result && (hasComparable() == other.hasComparable()); + if (hasComparable()) { + result = result && getComparable() + .equals(other.getComparable()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasComparable()) { + hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; + hash = (53 * hash) + getComparable().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return 
newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparableFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (comparableBuilder_ == null) { + result.comparable_ = comparable_; + } else { + result.comparable_ = comparableBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDefaultInstance()) return this; + if (other.hasComparable()) { + mergeComparable(other.getComparable()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasComparable()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); + if (hasComparable()) { + subBuilder.mergeFrom(getComparable()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparable(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ByteArrayComparable comparable = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + if (comparableBuilder_ == null) { + return comparable_; + } else { + return comparableBuilder_.getMessage(); + } + } + public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparable_ = value; + onChanged(); + } else { + comparableBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setComparable( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { + if (comparableBuilder_ == null) { + comparable_ = builderForValue.build(); + onChanged(); + } else { + comparableBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder 
mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { + comparable_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); + } else { + comparable_ = value; + } + onChanged(); + } else { + comparableBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearComparable() { + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + onChanged(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getComparableFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + if (comparableBuilder_ != null) { + return comparableBuilder_.getMessageOrBuilder(); + } else { + return comparable_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> + getComparableFieldBuilder() { + if (comparableBuilder_ == null) { + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( + comparable_, + getParentForChildren(), + isClean()); + comparable_ = null; + } + return comparableBuilder_; + } + + // @@protoc_insertion_point(builder_scope:BinaryComparator) + } + + static { + defaultInstance = new BinaryComparator(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BinaryComparator) + } + + public interface BinaryPrefixComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ByteArrayComparable comparable = 1; + boolean hasComparable(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); + } + public static final class BinaryPrefixComparator extends + com.google.protobuf.GeneratedMessage + implements BinaryPrefixComparatorOrBuilder { + // Use BinaryPrefixComparator.newBuilder() to construct. 
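Sketch of populating the required nested field of the BinaryComparator message completed above (editorial illustration; only methods generated in this file are used). Because isInitialized() checks hasComparable(), the sub-message is built first and handed to setComparable:

    ComparatorProtos.BinaryComparator bc =
        ComparatorProtos.BinaryComparator.newBuilder()
            .setComparable(ComparatorProtos.ByteArrayComparable.newBuilder()
                .setValue(com.google.protobuf.ByteString.copyFromUtf8("row-key"))
                .build())
            .build();
    com.google.protobuf.ByteString v = bc.getComparable().getValue();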
+ private BinaryPrefixComparator(Builder builder) { + super(builder); + } + private BinaryPrefixComparator(boolean noInit) {} + + private static final BinaryPrefixComparator defaultInstance; + public static BinaryPrefixComparator getDefaultInstance() { + return defaultInstance; + } + + public BinaryPrefixComparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable; + } + + private int bitField0_; + // required .ByteArrayComparable comparable = 1; + public static final int COMPARABLE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + return comparable_; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + return comparable_; + } + + private void initFields() { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasComparable()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, comparable_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, comparable_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) obj; + + boolean result = true; + result = result && (hasComparable() == other.hasComparable()); + if (hasComparable()) { + result = result && getComparable() + .equals(other.getComparable()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash 
= 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasComparable()) { + hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; + hash = (53 * hash) + getComparable().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator prototype) { 
+ return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparableFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (comparableBuilder_ == null) { + result.comparable_ = comparable_; + } else { +
result.comparable_ = comparableBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDefaultInstance()) return this; + if (other.hasComparable()) { + mergeComparable(other.getComparable()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasComparable()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); + if (hasComparable()) { + subBuilder.mergeFrom(getComparable()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparable(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ByteArrayComparable comparable = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + if (comparableBuilder_ == null) { + return comparable_; + } else { + return comparableBuilder_.getMessage(); + } + } + public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparable_ = value; + onChanged(); + } else { + comparableBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setComparable( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { + if (comparableBuilder_ == null) { + comparable_ = builderForValue.build(); + 
onChanged(); + } else { + comparableBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { + comparable_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); + } else { + comparable_ = value; + } + onChanged(); + } else { + comparableBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearComparable() { + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + onChanged(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getComparableFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + if (comparableBuilder_ != null) { + return comparableBuilder_.getMessageOrBuilder(); + } else { + return comparable_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> + getComparableFieldBuilder() { + if (comparableBuilder_ == null) { + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( + comparable_, + getParentForChildren(), + isClean()); + comparable_ = null; + } + return comparableBuilder_; + } + + // @@protoc_insertion_point(builder_scope:BinaryPrefixComparator) + } + + static { + defaultInstance = new BinaryPrefixComparator(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BinaryPrefixComparator) + } + + public interface BitComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ByteArrayComparable comparable = 1; + boolean hasComparable(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); + + // required .BitComparator.BitwiseOp bitwiseOp = 2; + boolean hasBitwiseOp(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp(); + } + public static final class BitComparator extends + com.google.protobuf.GeneratedMessage + implements BitComparatorOrBuilder { + // Use BitComparator.newBuilder() to construct. 
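+ // Editor's note: a minimal usage sketch for this generated message (kept as
+ // a comment; not part of the protoc output). It relies only on APIs visible
+ // in this file, and leaves the required ByteArrayComparable payload at its
+ // default instance purely for brevity:
+ //
+ //   ComparatorProtos.BitComparator cmp = ComparatorProtos.BitComparator.newBuilder()
+ //       .setComparable(ComparatorProtos.ByteArrayComparable.getDefaultInstance())
+ //       .setBitwiseOp(ComparatorProtos.BitComparator.BitwiseOp.XOR)
+ //       .build();  // build() throws if a required field were left unset
+ //   byte[] wire = cmp.toByteArray();
+ //   ComparatorProtos.BitComparator back =
+ //       ComparatorProtos.BitComparator.parseFrom(wire);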
+ private BitComparator(Builder builder) { + super(builder); + } + private BitComparator(boolean noInit) {} + + private static final BitComparator defaultInstance; + public static BitComparator getDefaultInstance() { + return defaultInstance; + } + + public BitComparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable; + } + + public enum BitwiseOp + implements com.google.protobuf.ProtocolMessageEnum { + AND(0, 1), + OR(1, 2), + XOR(2, 3), + ; + + public static final int AND_VALUE = 1; + public static final int OR_VALUE = 2; + public static final int XOR_VALUE = 3; + + + public final int getNumber() { return value; } + + public static BitwiseOp valueOf(int value) { + switch (value) { + case 1: return AND; + case 2: return OR; + case 3: return XOR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public BitwiseOp findValueByNumber(int number) { + return BitwiseOp.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDescriptor().getEnumTypes().get(0); + } + + private static final BitwiseOp[] VALUES = { + AND, OR, XOR, + }; + + public static BitwiseOp valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private BitwiseOp(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:BitComparator.BitwiseOp) + } + + private int bitField0_; + // required .ByteArrayComparable comparable = 1; + public static final int COMPARABLE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + return comparable_; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + return comparable_; + } + + // required .BitComparator.BitwiseOp bitwiseOp = 2; + public static final int BITWISEOP_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_; + public boolean hasBitwiseOp() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { + return bitwiseOp_; + } + + private void initFields() { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasComparable()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasBitwiseOp()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, comparable_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, bitwiseOp_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, comparable_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, bitwiseOp_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) obj; + + boolean result = true; + result = result && (hasComparable() == other.hasComparable()); + if (hasComparable()) { + result = result && getComparable() + .equals(other.getComparable()); + } + result = result && (hasBitwiseOp() == other.hasBitwiseOp()); + if (hasBitwiseOp()) { + result = result && + (getBitwiseOp() == other.getBitwiseOp()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasComparable()) { + hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; + hash = (53 * hash) + getComparable().hashCode(); + } + if (hasBitwiseOp()) { + hash = (37 * hash) + BITWISEOP_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getBitwiseOp()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparableFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (comparableBuilder_ == null) { + result.comparable_ = comparable_; + } else { + result.comparable_ = comparableBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.bitwiseOp_ = bitwiseOp_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDefaultInstance()) return this; + if (other.hasComparable()) { + mergeComparable(other.getComparable()); + } + if (other.hasBitwiseOp()) { + setBitwiseOp(other.getBitwiseOp()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasComparable()) { + + return false; + } + if (!hasBitwiseOp()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); + if (hasComparable()) { + subBuilder.mergeFrom(getComparable()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparable(subBuilder.buildPartial()); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + bitwiseOp_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required .ByteArrayComparable comparable = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + if (comparableBuilder_ == null) { + return comparable_; + } else { + return comparableBuilder_.getMessage(); + } + } + public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparable_ = value; + onChanged(); + } else { + comparableBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setComparable( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { + if (comparableBuilder_ == null) { + comparable_ = builderForValue.build(); + 
onChanged(); + } else { + comparableBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { + comparable_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); + } else { + comparable_ = value; + } + onChanged(); + } else { + comparableBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearComparable() { + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + onChanged(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getComparableFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + if (comparableBuilder_ != null) { + return comparableBuilder_.getMessageOrBuilder(); + } else { + return comparable_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> + getComparableFieldBuilder() { + if (comparableBuilder_ == null) { + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( + comparable_, + getParentForChildren(), + isClean()); + comparable_ = null; + } + return comparableBuilder_; + } + + // required .BitComparator.BitwiseOp bitwiseOp = 2; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + public boolean hasBitwiseOp() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { + return bitwiseOp_; + } + public Builder setBitwiseOp(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + bitwiseOp_ = value; + onChanged(); + return this; + } + public Builder clearBitwiseOp() { + bitField0_ = (bitField0_ & ~0x00000002); + bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:BitComparator) + } + + static { + defaultInstance = new BitComparator(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:BitComparator) + } + + public interface NullComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class NullComparator extends + com.google.protobuf.GeneratedMessage + implements NullComparatorOrBuilder { + // Use NullComparator.newBuilder() to construct. + private NullComparator(Builder builder) { + super(builder); + } + private NullComparator(boolean noInit) {} + + private static final NullComparator defaultInstance; + public static NullComparator getDefaultInstance() { + return defaultInstance; + } + + public NullComparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + 
super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:NullComparator) + } + + static { + defaultInstance = new NullComparator(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NullComparator) + } + + public interface RegexStringComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string pattern = 1; + 
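+ // Editor's note (annotation, not protoc output): pattern holds the regular
+ // expression source, patternFlags is presumably a bitmask of
+ // java.util.regex.Pattern compile flags (e.g. Pattern.CASE_INSENSITIVE), and
+ // charset names the encoding used to decode cell values before matching,
+ // mirroring the fields of HBase's RegexStringComparator.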
boolean hasPattern(); + String getPattern(); + + // required int32 patternFlags = 2; + boolean hasPatternFlags(); + int getPatternFlags(); + + // required string charset = 3; + boolean hasCharset(); + String getCharset(); + } + public static final class RegexStringComparator extends + com.google.protobuf.GeneratedMessage + implements RegexStringComparatorOrBuilder { + // Use RegexStringComparator.newBuilder() to construct. + private RegexStringComparator(Builder builder) { + super(builder); + } + private RegexStringComparator(boolean noInit) {} + + private static final RegexStringComparator defaultInstance; + public static RegexStringComparator getDefaultInstance() { + return defaultInstance; + } + + public RegexStringComparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable; + } + + private int bitField0_; + // required string pattern = 1; + public static final int PATTERN_FIELD_NUMBER = 1; + private java.lang.Object pattern_; + public boolean hasPattern() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getPattern() { + java.lang.Object ref = pattern_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + pattern_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getPatternBytes() { + java.lang.Object ref = pattern_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + pattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required int32 patternFlags = 2; + public static final int PATTERNFLAGS_FIELD_NUMBER = 2; + private int patternFlags_; + public boolean hasPatternFlags() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getPatternFlags() { + return patternFlags_; + } + + // required string charset = 3; + public static final int CHARSET_FIELD_NUMBER = 3; + private java.lang.Object charset_; + public boolean hasCharset() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getCharset() { + java.lang.Object ref = charset_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + charset_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getCharsetBytes() { + java.lang.Object ref = charset_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + charset_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + pattern_ = ""; + patternFlags_ = 0; + charset_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized 
== 1; + + if (!hasPattern()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasPatternFlags()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCharset()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getPatternBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt32(2, patternFlags_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getCharsetBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getPatternBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, patternFlags_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getCharsetBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) obj; + + boolean result = true; + result = result && (hasPattern() == other.hasPattern()); + if (hasPattern()) { + result = result && getPattern() + .equals(other.getPattern()); + } + result = result && (hasPatternFlags() == other.hasPatternFlags()); + if (hasPatternFlags()) { + result = result && (getPatternFlags() + == other.getPatternFlags()); + } + result = result && (hasCharset() == other.hasCharset()); + if (hasCharset()) { + result = result && getCharset() + .equals(other.getCharset()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPattern()) { + hash = (37 * hash) + PATTERN_FIELD_NUMBER; + hash = (53 * hash) + getPattern().hashCode(); + } + if (hasPatternFlags()) { + hash = (37 * hash) + PATTERNFLAGS_FIELD_NUMBER; + hash = (53 * hash) + getPatternFlags(); + } + if (hasCharset()) { + hash = (37 * hash) + CHARSET_FIELD_NUMBER; + hash = (53 * hash) + getCharset().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + pattern_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + patternFlags_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + charset_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.pattern_ = pattern_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.patternFlags_ = patternFlags_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.charset_ = charset_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDefaultInstance()) return this; + if (other.hasPattern()) { + setPattern(other.getPattern()); + } + if (other.hasPatternFlags()) { + setPatternFlags(other.getPatternFlags()); + } + if (other.hasCharset()) { + setCharset(other.getCharset()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPattern()) { + + return false; + } + if (!hasPatternFlags()) { + + return false; + } + if (!hasCharset()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + pattern_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + patternFlags_ = input.readInt32(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + charset_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string pattern = 1; + private java.lang.Object pattern_ = ""; + public boolean hasPattern() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getPattern() { + java.lang.Object ref = pattern_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + pattern_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setPattern(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + pattern_ = value; + onChanged(); + return this; + } + public Builder clearPattern() { + bitField0_ = (bitField0_ & ~0x00000001); + pattern_ = getDefaultInstance().getPattern(); + onChanged(); + return this; + } + void setPattern(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + pattern_ = value; + onChanged(); + } + + // required int32 patternFlags = 2; + private int patternFlags_ ; + public boolean hasPatternFlags() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getPatternFlags() { + return patternFlags_; + } + public Builder setPatternFlags(int value) { + bitField0_ |= 0x00000002; + patternFlags_ = value; + onChanged(); + return this; + } + public Builder clearPatternFlags() { + bitField0_ = (bitField0_ & ~0x00000002); + patternFlags_ = 0; + onChanged(); + return this; + } + + // required string charset = 3; + private java.lang.Object charset_ = ""; + public boolean hasCharset() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getCharset() { + java.lang.Object ref = charset_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + charset_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setCharset(String value) { + if (value == null) { + throw new NullPointerException(); + } + 
bitField0_ |= 0x00000004; + charset_ = value; + onChanged(); + return this; + } + public Builder clearCharset() { + bitField0_ = (bitField0_ & ~0x00000004); + charset_ = getDefaultInstance().getCharset(); + onChanged(); + return this; + } + void setCharset(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + charset_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:RegexStringComparator) + } + + static { + defaultInstance = new RegexStringComparator(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegexStringComparator) + } + + public interface SubstringComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string substr = 1; + boolean hasSubstr(); + String getSubstr(); + } + public static final class SubstringComparator extends + com.google.protobuf.GeneratedMessage + implements SubstringComparatorOrBuilder { + // Use SubstringComparator.newBuilder() to construct. + private SubstringComparator(Builder builder) { + super(builder); + } + private SubstringComparator(boolean noInit) {} + + private static final SubstringComparator defaultInstance; + public static SubstringComparator getDefaultInstance() { + return defaultInstance; + } + + public SubstringComparator getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable; + } + + private int bitField0_; + // required string substr = 1; + public static final int SUBSTR_FIELD_NUMBER = 1; + private java.lang.Object substr_; + public boolean hasSubstr() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getSubstr() { + java.lang.Object ref = substr_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + substr_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getSubstrBytes() { + java.lang.Object ref = substr_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + substr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + substr_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasSubstr()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getSubstrBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getSubstrBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) obj; + + boolean result = true; + result = result && (hasSubstr() == other.hasSubstr()); + if (hasSubstr()) { + result = result && getSubstr() + .equals(other.getSubstr()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSubstr()) { + hash = (37 * hash) + SUBSTR_FIELD_NUMBER; + hash = (53 * hash) + getSubstr().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + substr_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator 
buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.substr_ = substr_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDefaultInstance()) return this; + if (other.hasSubstr()) { + setSubstr(other.getSubstr()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasSubstr()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + substr_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string substr = 1; + private java.lang.Object substr_ = ""; + public boolean hasSubstr() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getSubstr() { + java.lang.Object ref = substr_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + substr_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setSubstr(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + substr_ = value; + onChanged(); + return this; + } + public Builder clearSubstr() { + bitField0_ = (bitField0_ & ~0x00000001); + substr_ = getDefaultInstance().getSubstr(); + onChanged(); + return this; + } + void setSubstr(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + substr_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:SubstringComparator) + } + + static { + defaultInstance = new SubstringComparator(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:SubstringComparator) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Comparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Comparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ByteArrayComparable_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ByteArrayComparable_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BinaryComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BinaryComparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BinaryPrefixComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BinaryPrefixComparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BitComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BitComparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NullComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NullComparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegexStringComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegexStringComparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SubstringComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SubstringComparator_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\020Comparator.proto\"8\n\nComparator\022\014\n\004name" + + "\030\001 \002(\t\022\034\n\024serializedComparator\030\002 \001(\014\"$\n\023" + + "ByteArrayComparable\022\r\n\005value\030\001 \001(\014\"<\n\020Bi" + + "naryComparator\022(\n\ncomparable\030\001 \002(\0132\024.Byt" + + "eArrayComparable\"B\n\026BinaryPrefixComparat" + + "or\022(\n\ncomparable\030\001 \002(\0132\024.ByteArrayCompar" + + "able\"\215\001\n\rBitComparator\022(\n\ncomparable\030\001 \002" + + "(\0132\024.ByteArrayComparable\022+\n\tbitwiseOp\030\002 " + + "\002(\0162\030.BitComparator.BitwiseOp\"%\n\tBitwise" + + "Op\022\007\n\003AND\020\001\022\006\n\002OR\020\002\022\007\n\003XOR\020\003\"\020\n\016NullComp", + "arator\"O\n\025RegexStringComparator\022\017\n\007patte" + + "rn\030\001 \002(\t\022\024\n\014patternFlags\030\002 \002(\005\022\017\n\007charse" + + "t\030\003 \002(\t\"%\n\023SubstringComparator\022\016\n\006substr" + + "\030\001 \002(\tBF\n*org.apache.hadoop.hbase.protob" + + "uf.generatedB\020ComparatorProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor 
root) { + descriptor = root; + internal_static_Comparator_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Comparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Comparator_descriptor, + new java.lang.String[] { "Name", "SerializedComparator", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder.class); + internal_static_ByteArrayComparable_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_ByteArrayComparable_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ByteArrayComparable_descriptor, + new java.lang.String[] { "Value", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder.class); + internal_static_BinaryComparator_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_BinaryComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BinaryComparator_descriptor, + new java.lang.String[] { "Comparable", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.Builder.class); + internal_static_BinaryPrefixComparator_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_BinaryPrefixComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BinaryPrefixComparator_descriptor, + new java.lang.String[] { "Comparable", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.Builder.class); + internal_static_BitComparator_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_BitComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BitComparator_descriptor, + new java.lang.String[] { "Comparable", "BitwiseOp", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); + internal_static_NullComparator_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_NullComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NullComparator_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.Builder.class); + internal_static_RegexStringComparator_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_RegexStringComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegexStringComparator_descriptor, + new java.lang.String[] { "Pattern", "PatternFlags", "Charset", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.Builder.class); + internal_static_SubstringComparator_descriptor = + 
getDescriptor().getMessageTypes().get(7); + internal_static_SubstringComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SubstringComparator_descriptor, + new java.lang.String[] { "Substr", }, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.class, + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java new file mode 100644 index 0000000..79d13c1 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java @@ -0,0 +1,1018 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: FS.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class FSProtos { + private FSProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface HBaseVersionFileContentOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string version = 1; + boolean hasVersion(); + String getVersion(); + } + public static final class HBaseVersionFileContent extends + com.google.protobuf.GeneratedMessage + implements HBaseVersionFileContentOrBuilder { + // Use HBaseVersionFileContent.newBuilder() to construct. + private HBaseVersionFileContent(Builder builder) { + super(builder); + } + private HBaseVersionFileContent(boolean noInit) {} + + private static final HBaseVersionFileContent defaultInstance; + public static HBaseVersionFileContent getDefaultInstance() { + return defaultInstance; + } + + public HBaseVersionFileContent getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable; + } + + private int bitField0_; + // required string version = 1; + public static final int VERSION_FIELD_NUMBER = 1; + private java.lang.Object version_; + public boolean hasVersion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getVersion() { + java.lang.Object ref = version_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + version_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + version_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean 
isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasVersion()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getVersionBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getVersionBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) obj; + + boolean result = true; + result = result && (hasVersion() == other.hasVersion()); + if (hasVersion()) { + result = result && getVersion() + .equals(other.getVersion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasVersion()) { + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + version_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent build() { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.version_ = version_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) return this; + if (other.hasVersion()) { + setVersion(other.getVersion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasVersion()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + version_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string version = 1; + private java.lang.Object version_ = ""; + public boolean hasVersion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getVersion() { + java.lang.Object ref = version_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + version_ = s; + return s; + } else { + return 
(String) ref; + } + } + public Builder setVersion(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + version_ = value; + onChanged(); + return this; + } + public Builder clearVersion() { + bitField0_ = (bitField0_ & ~0x00000001); + version_ = getDefaultInstance().getVersion(); + onChanged(); + return this; + } + void setVersion(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + version_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:HBaseVersionFileContent) + } + + static { + defaultInstance = new HBaseVersionFileContent(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:HBaseVersionFileContent) + } + + public interface ReferenceOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes splitkey = 1; + boolean hasSplitkey(); + com.google.protobuf.ByteString getSplitkey(); + + // required .Reference.Range range = 2; + boolean hasRange(); + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange(); + } + public static final class Reference extends + com.google.protobuf.GeneratedMessage + implements ReferenceOrBuilder { + // Use Reference.newBuilder() to construct. + private Reference(Builder builder) { + super(builder); + } + private Reference(boolean noInit) {} + + private static final Reference defaultInstance; + public static Reference getDefaultInstance() { + return defaultInstance; + } + + public Reference getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable; + } + + public enum Range + implements com.google.protobuf.ProtocolMessageEnum { + TOP(0, 0), + BOTTOM(1, 1), + ; + + public static final int TOP_VALUE = 0; + public static final int BOTTOM_VALUE = 1; + + + public final int getNumber() { return value; } + + public static Range valueOf(int value) { + switch (value) { + case 0: return TOP; + case 1: return BOTTOM; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Range findValueByNumber(int number) { + return Range.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor().getEnumTypes().get(0); + } + + private static final Range[] VALUES = { + TOP, BOTTOM, + }; + + public static Range valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + 
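+ // 'index' is this constant's position in the enum descriptor; 'value' (below) is the wire number declared in FS.proto.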
private final int value; + + private Range(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Reference.Range) + } + + private int bitField0_; + // required bytes splitkey = 1; + public static final int SPLITKEY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString splitkey_; + public boolean hasSplitkey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getSplitkey() { + return splitkey_; + } + + // required .Reference.Range range = 2; + public static final int RANGE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_; + public boolean hasRange() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() { + return range_; + } + + private void initFields() { + splitkey_ = com.google.protobuf.ByteString.EMPTY; + range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasSplitkey()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRange()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, splitkey_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, range_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, splitkey_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, range_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) obj; + + boolean result = true; + result = result && (hasSplitkey() == other.hasSplitkey()); + if (hasSplitkey()) { + result = result && getSplitkey() + .equals(other.getSplitkey()); + } + result = result && (hasRange() == other.hasRange()); + if (hasRange()) { + result = result && + (getRange() == other.getRange()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSplitkey()) { + hash = (37 * hash) + SPLITKEY_FIELD_NUMBER; + hash = (53 * hash) + 
getSplitkey().hashCode(); + } + if (hasRange()) { + hash = (37 * hash) + RANGE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getRange()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + 
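+ // Builders created with a BuilderParent propagate onChanged() notifications to the enclosing builder of a nested message.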
return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + splitkey_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference build() { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.splitkey_ = splitkey_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.range_ = range_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance()) return this; + if (other.hasSplitkey()) { + setSplitkey(other.getSplitkey()); + } + if (other.hasRange()) { + setRange(other.getRange()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasSplitkey()) { + + return false; + } + if (!hasRange()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + splitkey_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + range_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required bytes splitkey = 1; + private com.google.protobuf.ByteString splitkey_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSplitkey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getSplitkey() { + return splitkey_; + } + public Builder setSplitkey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + splitkey_ = value; + onChanged(); + return this; + } + public Builder clearSplitkey() { + bitField0_ = (bitField0_ & ~0x00000001); + splitkey_ = getDefaultInstance().getSplitkey(); + onChanged(); + return this; + } + + // required .Reference.Range range = 2; + private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; + public boolean hasRange() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() { + return range_; + } + public Builder setRange(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + range_ = value; + onChanged(); + return this; + } + public Builder clearRange() { + bitField0_ = (bitField0_ & ~0x00000002); + range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Reference) + } + + static { + defaultInstance = new Reference(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Reference) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_HBaseVersionFileContent_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_HBaseVersionFileContent_fieldAccessorTable; 
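+ // A minimal round-trip sketch for the generated Reference message (hypothetical caller code, not part of
+ // the generated file; the split key literal is an assumption for illustration):
+ //
+ //   import com.google.protobuf.ByteString;
+ //   import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
+ //
+ //   FSProtos.Reference ref = FSProtos.Reference.newBuilder()
+ //       .setSplitkey(ByteString.copyFromUtf8("row-0100"))   // required bytes field
+ //       .setRange(FSProtos.Reference.Range.TOP)             // required enum field
+ //       .build();                                           // throws if a required field is unset
+ //   FSProtos.Reference parsed = FSProtos.Reference.parseFrom(ref.toByteArray());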
+ private static com.google.protobuf.Descriptors.Descriptor + internal_static_Reference_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Reference_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\010FS.proto\"*\n\027HBaseVersionFileContent\022\017\n" + + "\007version\030\001 \002(\t\"\\\n\tReference\022\020\n\010splitkey\030" + + "\001 \002(\014\022\037\n\005range\030\002 \002(\0162\020.Reference.Range\"\034" + + "\n\005Range\022\007\n\003TOP\020\000\022\n\n\006BOTTOM\020\001B;\n*org.apac" + + "he.hadoop.hbase.protobuf.generatedB\010FSPr" + + "otosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_HBaseVersionFileContent_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_HBaseVersionFileContent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HBaseVersionFileContent_descriptor, + new java.lang.String[] { "Version", }, + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, + org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class); + internal_static_Reference_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Reference_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Reference_descriptor, + new java.lang.String[] { "Splitkey", "Range", }, + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, + org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java new file mode 100644 index 0000000..8088751 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java @@ -0,0 +1,12942 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Filter.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class FilterProtos { + private FilterProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ColumnCountGetFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int32 limit = 1; + boolean hasLimit(); + int getLimit(); + } + public static final class ColumnCountGetFilter extends + com.google.protobuf.GeneratedMessage + implements ColumnCountGetFilterOrBuilder { + // Use ColumnCountGetFilter.newBuilder() to construct. 
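+ // ColumnCountGetFilter is the wire form of the HBase filter that caps how many columns a Get returns per
+ // row; the single required int32 'limit' is the only state it carries — the filtering itself runs server-side.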
+ private ColumnCountGetFilter(Builder builder) { + super(builder); + } + private ColumnCountGetFilter(boolean noInit) {} + + private static final ColumnCountGetFilter defaultInstance; + public static ColumnCountGetFilter getDefaultInstance() { + return defaultInstance; + } + + public ColumnCountGetFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable; + } + + private int bitField0_; + // required int32 limit = 1; + public static final int LIMIT_FIELD_NUMBER = 1; + private int limit_; + public boolean hasLimit() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getLimit() { + return limit_; + } + + private void initFields() { + limit_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLimit()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, limit_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, limit_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj; + + boolean result = true; + result = result && (hasLimit() == other.hasLimit()); + if (hasLimit()) { + result = result && (getLimit() + == other.getLimit()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLimit()) { + hash = (37 * hash) + LIMIT_FIELD_NUMBER; + hash = (53 * hash) + getLimit(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + limit_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.limit_ = limit_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this; + if (other.hasLimit()) { + setLimit(other.getLimit()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLimit()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + limit_ = input.readInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required int32 limit = 1; + private int limit_ ; + public boolean hasLimit() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getLimit() { + return limit_; + } + public Builder setLimit(int value) { + bitField0_ |= 0x00000001; + limit_ = value; + onChanged(); + return this; + } + public Builder clearLimit() { + bitField0_ = (bitField0_ & ~0x00000001); + limit_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ColumnCountGetFilter) + } + + static { + defaultInstance = new ColumnCountGetFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ColumnCountGetFilter) + } + + public interface ColumnPaginationFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int32 limit = 1; + boolean hasLimit(); + int getLimit(); + + // optional int32 offset = 2; + boolean hasOffset(); + int getOffset(); + } + public static final class ColumnPaginationFilter extends + com.google.protobuf.GeneratedMessage + implements ColumnPaginationFilterOrBuilder { + // Use ColumnPaginationFilter.newBuilder() to construct. 
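+ // Illustrative usage sketch (editorial comment, not compiler output; the
+ // field values are made up). The same builder/parse pattern applies to
+ // ColumnCountGetFilter above:
+ //
+ //   ColumnPaginationFilter filter = ColumnPaginationFilter.newBuilder()
+ //       .setLimit(25)    // required; build() throws if limit is unset
+ //       .setOffset(100)  // optional
+ //       .build();
+ //   byte[] wire = filter.toByteArray();
+ //   ColumnPaginationFilter roundTrip = ColumnPaginationFilter.parseFrom(wire);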
+ private ColumnPaginationFilter(Builder builder) { + super(builder); + } + private ColumnPaginationFilter(boolean noInit) {} + + private static final ColumnPaginationFilter defaultInstance; + public static ColumnPaginationFilter getDefaultInstance() { + return defaultInstance; + } + + public ColumnPaginationFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable; + } + + private int bitField0_; + // required int32 limit = 1; + public static final int LIMIT_FIELD_NUMBER = 1; + private int limit_; + public boolean hasLimit() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getLimit() { + return limit_; + } + + // optional int32 offset = 2; + public static final int OFFSET_FIELD_NUMBER = 2; + private int offset_; + public boolean hasOffset() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getOffset() { + return offset_; + } + + private void initFields() { + limit_ = 0; + offset_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLimit()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, limit_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt32(2, offset_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, limit_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, offset_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj; + + boolean result = true; + result = result && (hasLimit() == other.hasLimit()); + if (hasLimit()) { + result = result && (getLimit() + == other.getLimit()); + } + result = result && (hasOffset() == other.hasOffset()); + if (hasOffset()) { + result = result && (getOffset() + == other.getOffset()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLimit()) { + hash = (37 * hash) + LIMIT_FIELD_NUMBER; + hash = (53 * hash) + getLimit(); + } + if (hasOffset()) { + hash = (37 * hash) + OFFSET_FIELD_NUMBER; + hash = (53 * hash) + getOffset(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + limit_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + offset_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.limit_ = limit_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + 
result.offset_ = offset_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this; + if (other.hasLimit()) { + setLimit(other.getLimit()); + } + if (other.hasOffset()) { + setOffset(other.getOffset()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLimit()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + limit_ = input.readInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + offset_ = input.readInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required int32 limit = 1; + private int limit_ ; + public boolean hasLimit() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getLimit() { + return limit_; + } + public Builder setLimit(int value) { + bitField0_ |= 0x00000001; + limit_ = value; + onChanged(); + return this; + } + public Builder clearLimit() { + bitField0_ = (bitField0_ & ~0x00000001); + limit_ = 0; + onChanged(); + return this; + } + + // optional int32 offset = 2; + private int offset_ ; + public boolean hasOffset() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getOffset() { + return offset_; + } + public Builder setOffset(int value) { + bitField0_ |= 0x00000002; + offset_ = value; + onChanged(); + return this; + } + public Builder clearOffset() { + bitField0_ = (bitField0_ & ~0x00000002); + offset_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ColumnPaginationFilter) + } + + static { + defaultInstance = new ColumnPaginationFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ColumnPaginationFilter) + } + + public interface ColumnPrefixFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes prefix = 1; + boolean hasPrefix(); + com.google.protobuf.ByteString getPrefix(); + } + public static final class ColumnPrefixFilter extends + com.google.protobuf.GeneratedMessage + implements ColumnPrefixFilterOrBuilder { + // Use ColumnPrefixFilter.newBuilder() to construct. 
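+ // Illustrative usage sketch (editorial comment, not compiler output). The
+ // required prefix is carried as a protobuf ByteString; note that
+ // parseDelimitedFrom below returns null when the stream is already at EOF:
+ //
+ //   ColumnPrefixFilter filter = ColumnPrefixFilter.newBuilder()
+ //       .setPrefix(com.google.protobuf.ByteString.copyFromUtf8("col_"))
+ //       .build();
+ //   assert filter.isInitialized();  // true once the required prefix is set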
+ private ColumnPrefixFilter(Builder builder) { + super(builder); + } + private ColumnPrefixFilter(boolean noInit) {} + + private static final ColumnPrefixFilter defaultInstance; + public static ColumnPrefixFilter getDefaultInstance() { + return defaultInstance; + } + + public ColumnPrefixFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable; + } + + private int bitField0_; + // required bytes prefix = 1; + public static final int PREFIX_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString prefix_; + public boolean hasPrefix() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getPrefix() { + return prefix_; + } + + private void initFields() { + prefix_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPrefix()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, prefix_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, prefix_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj; + + boolean result = true; + result = result && (hasPrefix() == other.hasPrefix()); + if (hasPrefix()) { + result = result && getPrefix() + .equals(other.getPrefix()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPrefix()) { + hash = (37 * hash) + PREFIX_FIELD_NUMBER; + hash = (53 * hash) + getPrefix().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder { + public static 
final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + prefix_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prefix_ = prefix_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this; + if (other.hasPrefix()) { + setPrefix(other.getPrefix()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPrefix()) { + + return 
false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + prefix_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes prefix = 1; + private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasPrefix() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getPrefix() { + return prefix_; + } + public Builder setPrefix(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + prefix_ = value; + onChanged(); + return this; + } + public Builder clearPrefix() { + bitField0_ = (bitField0_ & ~0x00000001); + prefix_ = getDefaultInstance().getPrefix(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ColumnPrefixFilter) + } + + static { + defaultInstance = new ColumnPrefixFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ColumnPrefixFilter) + } + + public interface ColumnRangeFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes minColumn = 1; + boolean hasMinColumn(); + com.google.protobuf.ByteString getMinColumn(); + + // optional bool minColumnInclusive = 2; + boolean hasMinColumnInclusive(); + boolean getMinColumnInclusive(); + + // optional bytes maxColumn = 3; + boolean hasMaxColumn(); + com.google.protobuf.ByteString getMaxColumn(); + + // optional bool maxColumnInclusive = 4; + boolean hasMaxColumnInclusive(); + boolean getMaxColumnInclusive(); + } + public static final class ColumnRangeFilter extends + com.google.protobuf.GeneratedMessage + implements ColumnRangeFilterOrBuilder { + // Use ColumnRangeFilter.newBuilder() to construct. 
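+ // Illustrative usage sketch (editorial comment, not compiler output). All
+ // four fields are optional, so an empty message is valid; unset fields
+ // report hasXxx() == false and return their defaults:
+ //
+ //   ColumnRangeFilter range = ColumnRangeFilter.newBuilder()
+ //       .setMinColumn(com.google.protobuf.ByteString.copyFromUtf8("a"))
+ //       .setMinColumnInclusive(true)
+ //       .setMaxColumn(com.google.protobuf.ByteString.copyFromUtf8("m"))
+ //       .setMaxColumnInclusive(false)
+ //       .build();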
+ private ColumnRangeFilter(Builder builder) { + super(builder); + } + private ColumnRangeFilter(boolean noInit) {} + + private static final ColumnRangeFilter defaultInstance; + public static ColumnRangeFilter getDefaultInstance() { + return defaultInstance; + } + + public ColumnRangeFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes minColumn = 1; + public static final int MINCOLUMN_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString minColumn_; + public boolean hasMinColumn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getMinColumn() { + return minColumn_; + } + + // optional bool minColumnInclusive = 2; + public static final int MINCOLUMNINCLUSIVE_FIELD_NUMBER = 2; + private boolean minColumnInclusive_; + public boolean hasMinColumnInclusive() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMinColumnInclusive() { + return minColumnInclusive_; + } + + // optional bytes maxColumn = 3; + public static final int MAXCOLUMN_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString maxColumn_; + public boolean hasMaxColumn() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getMaxColumn() { + return maxColumn_; + } + + // optional bool maxColumnInclusive = 4; + public static final int MAXCOLUMNINCLUSIVE_FIELD_NUMBER = 4; + private boolean maxColumnInclusive_; + public boolean hasMaxColumnInclusive() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getMaxColumnInclusive() { + return maxColumnInclusive_; + } + + private void initFields() { + minColumn_ = com.google.protobuf.ByteString.EMPTY; + minColumnInclusive_ = false; + maxColumn_ = com.google.protobuf.ByteString.EMPTY; + maxColumnInclusive_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, minColumn_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, minColumnInclusive_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, maxColumn_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(4, maxColumnInclusive_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, minColumn_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, minColumnInclusive_); + } + if (((bitField0_ & 0x00000004) == 
0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, maxColumn_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, maxColumnInclusive_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj; + + boolean result = true; + result = result && (hasMinColumn() == other.hasMinColumn()); + if (hasMinColumn()) { + result = result && getMinColumn() + .equals(other.getMinColumn()); + } + result = result && (hasMinColumnInclusive() == other.hasMinColumnInclusive()); + if (hasMinColumnInclusive()) { + result = result && (getMinColumnInclusive() + == other.getMinColumnInclusive()); + } + result = result && (hasMaxColumn() == other.hasMaxColumn()); + if (hasMaxColumn()) { + result = result && getMaxColumn() + .equals(other.getMaxColumn()); + } + result = result && (hasMaxColumnInclusive() == other.hasMaxColumnInclusive()); + if (hasMaxColumnInclusive()) { + result = result && (getMaxColumnInclusive() + == other.getMaxColumnInclusive()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasMinColumn()) { + hash = (37 * hash) + MINCOLUMN_FIELD_NUMBER; + hash = (53 * hash) + getMinColumn().hashCode(); + } + if (hasMinColumnInclusive()) { + hash = (37 * hash) + MINCOLUMNINCLUSIVE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMinColumnInclusive()); + } + if (hasMaxColumn()) { + hash = (37 * hash) + MAXCOLUMN_FIELD_NUMBER; + hash = (53 * hash) + getMaxColumn().hashCode(); + } + if (hasMaxColumnInclusive()) { + hash = (37 * hash) + MAXCOLUMNINCLUSIVE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMaxColumnInclusive()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + 
private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + minColumn_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + minColumnInclusive_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + maxColumn_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + maxColumnInclusive_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.minColumn_ = minColumn_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.minColumnInclusive_ = minColumnInclusive_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.maxColumn_ = maxColumn_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.maxColumnInclusive_ = maxColumnInclusive_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this; + if (other.hasMinColumn()) { + setMinColumn(other.getMinColumn()); + } + if (other.hasMinColumnInclusive()) { + setMinColumnInclusive(other.getMinColumnInclusive()); + } + if (other.hasMaxColumn()) { + setMaxColumn(other.getMaxColumn()); + } + if (other.hasMaxColumnInclusive()) { + setMaxColumnInclusive(other.getMaxColumnInclusive()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + minColumn_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + minColumnInclusive_ = input.readBool(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + maxColumn_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + maxColumnInclusive_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes minColumn = 1; + private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasMinColumn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getMinColumn() { + return minColumn_; + } + public Builder setMinColumn(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + minColumn_ = value; + onChanged(); + return this; + } + public Builder clearMinColumn() { + bitField0_ = (bitField0_ & ~0x00000001); + minColumn_ = getDefaultInstance().getMinColumn(); + onChanged(); + return this; + } + + // optional bool minColumnInclusive = 2; + private boolean minColumnInclusive_ ; + public boolean hasMinColumnInclusive() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMinColumnInclusive() { + return minColumnInclusive_; + } + public Builder setMinColumnInclusive(boolean value) { + bitField0_ |= 0x00000002; + minColumnInclusive_ = value; + onChanged(); + return this; + } + public Builder clearMinColumnInclusive() { + bitField0_ = (bitField0_ & ~0x00000002); + minColumnInclusive_ = false; + onChanged(); + return this; + } + + // optional bytes maxColumn = 3; + private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasMaxColumn() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getMaxColumn() { + return maxColumn_; + } + public Builder setMaxColumn(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + maxColumn_ = value; + onChanged(); + return this; + } + public Builder clearMaxColumn() { + bitField0_ = (bitField0_ & ~0x00000004); + maxColumn_ = getDefaultInstance().getMaxColumn(); + onChanged(); + return this; + } + + // optional bool maxColumnInclusive = 4; + private boolean maxColumnInclusive_ ; + public boolean hasMaxColumnInclusive() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getMaxColumnInclusive() { + return maxColumnInclusive_; + } + public Builder setMaxColumnInclusive(boolean value) { + bitField0_ |= 0x00000008; + maxColumnInclusive_ = value; + onChanged(); + return this; + } + public Builder 
clearMaxColumnInclusive() { + bitField0_ = (bitField0_ & ~0x00000008); + maxColumnInclusive_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ColumnRangeFilter) + } + + static { + defaultInstance = new ColumnRangeFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ColumnRangeFilter) + } + + public interface CompareFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .CompareType compareOp = 1; + boolean hasCompareOp(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); + + // optional .Comparator comparator = 2; + boolean hasComparator(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); + } + public static final class CompareFilter extends + com.google.protobuf.GeneratedMessage + implements CompareFilterOrBuilder { + // Use CompareFilter.newBuilder() to construct. + private CompareFilter(Builder builder) { + super(builder); + } + private CompareFilter(boolean noInit) {} + + private static final CompareFilter defaultInstance; + public static CompareFilter getDefaultInstance() { + return defaultInstance; + } + + public CompareFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .CompareType compareOp = 1; + public static final int COMPAREOP_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; + public boolean hasCompareOp() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { + return compareOp_; + } + + // optional .Comparator comparator = 2; + public static final int COMPARATOR_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { + return comparator_; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { + return comparator_; + } + + private void initFields() { + compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareOp()) { + memoizedIsInitialized = 0; + return false; + } + if (hasComparator()) { + if (!getComparator().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException 
{ + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, compareOp_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, comparator_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, compareOp_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, comparator_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj; + + boolean result = true; + result = result && (hasCompareOp() == other.hasCompareOp()); + if (hasCompareOp()) { + result = result && + (getCompareOp() == other.getCompareOp()); + } + result = result && (hasComparator() == other.hasComparator()); + if (hasComparator()) { + result = result && getComparator() + .equals(other.getComparator()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCompareOp()) { + hash = (37 * hash) + COMPAREOP_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getCompareOp()); + } + if (hasComparator()) { + hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; + hash = (53 * hash) + getComparator().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparatorFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + bitField0_ = (bitField0_ & ~0x00000001); + if (comparatorBuilder_ == null) { + comparator_ = 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.compareOp_ = compareOp_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (comparatorBuilder_ == null) { + result.comparator_ = comparator_; + } else { + result.comparator_ = comparatorBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this; + if (other.hasCompareOp()) { + setCompareOp(other.getCompareOp()); + } + if (other.hasComparator()) { + mergeComparator(other.getComparator()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareOp()) { + + return false; + } + if (hasComparator()) { + if (!getComparator().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + 
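+ // Editorial note, not compiler output: this switch dispatches on the
+ // protobuf wire tag, i.e. (field_number << 3) | wire_type. A tag of 0
+ // signals end of input; tag 8 is field 1 as a varint (the CompareType
+ // enum -- unrecognized numbers are preserved as varint unknown fields);
+ // tag 18 is field 2 as a length-delimited Comparator submessage. Every
+ // other tag is handed to parseUnknownField so it survives a round trip.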
default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + compareOp_ = value; + } + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); + if (hasComparator()) { + subBuilder.mergeFrom(getComparator()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparator(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .CompareType compareOp = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + public boolean hasCompareOp() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { + return compareOp_; + } + public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + compareOp_ = value; + onChanged(); + return this; + } + public Builder clearCompareOp() { + bitField0_ = (bitField0_ & ~0x00000001); + compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + onChanged(); + return this; + } + + // optional .Comparator comparator = 2; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { + if (comparatorBuilder_ == null) { + return comparator_; + } else { + return comparatorBuilder_.getMessage(); + } + } + public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { + if (comparatorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparator_ = value; + onChanged(); + } else { + comparatorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setComparator( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { + if (comparatorBuilder_ == null) { + comparator_ = builderForValue.build(); + onChanged(); + } else { + comparatorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { + if (comparatorBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 
0x00000002) && + comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { + comparator_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); + } else { + comparator_ = value; + } + onChanged(); + } else { + comparatorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearComparator() { + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + onChanged(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getComparatorFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { + if (comparatorBuilder_ != null) { + return comparatorBuilder_.getMessageOrBuilder(); + } else { + return comparator_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> + getComparatorFieldBuilder() { + if (comparatorBuilder_ == null) { + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( + comparator_, + getParentForChildren(), + isClean()); + comparator_ = null; + } + return comparatorBuilder_; + } + + // @@protoc_insertion_point(builder_scope:CompareFilter) + } + + static { + defaultInstance = new CompareFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompareFilter) + } + + public interface DependentColumnFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .CompareFilter compareFilter = 1; + boolean hasCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); + + // optional bytes columnFamily = 2; + boolean hasColumnFamily(); + com.google.protobuf.ByteString getColumnFamily(); + + // optional bytes columnQualifier = 3; + boolean hasColumnQualifier(); + com.google.protobuf.ByteString getColumnQualifier(); + + // optional bool dropDependentColumn = 4; + boolean hasDropDependentColumn(); + boolean getDropDependentColumn(); + } + public static final class DependentColumnFilter extends + com.google.protobuf.GeneratedMessage + implements DependentColumnFilterOrBuilder { + // Use DependentColumnFilter.newBuilder() to construct. 
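+ // --------------------------------------------------------------------
+ // Editorial sketch, not compiler output: minimal client-side use of the
+ // CompareFilter message completed above. Only the classes added by this
+ // patch and the stock protobuf-java runtime are assumed.
+ //
+ //   import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
+ //   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+ //
+ //   // compareOp is the lone required field; comparator may be omitted.
+ //   FilterProtos.CompareFilter pb = FilterProtos.CompareFilter.newBuilder()
+ //       .setCompareOp(HBaseProtos.CompareType.LESS)
+ //       .build();                      // throws if a required field is unset
+ //   byte[] bytes = pb.toByteArray();   // drives writeTo/getSerializedSize
+ //   FilterProtos.CompareFilter back = FilterProtos.CompareFilter.parseFrom(bytes);
+ //   assert back.getCompareOp() == HBaseProtos.CompareType.LESS;
+ // --------------------------------------------------------------------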
+ private DependentColumnFilter(Builder builder) { + super(builder); + } + private DependentColumnFilter(boolean noInit) {} + + private static final DependentColumnFilter defaultInstance; + public static DependentColumnFilter getDefaultInstance() { + return defaultInstance; + } + + public DependentColumnFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .CompareFilter compareFilter = 1; + public static final int COMPAREFILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + return compareFilter_; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + return compareFilter_; + } + + // optional bytes columnFamily = 2; + public static final int COLUMNFAMILY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString columnFamily_; + public boolean hasColumnFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnFamily() { + return columnFamily_; + } + + // optional bytes columnQualifier = 3; + public static final int COLUMNQUALIFIER_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString columnQualifier_; + public boolean hasColumnQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getColumnQualifier() { + return columnQualifier_; + } + + // optional bool dropDependentColumn = 4; + public static final int DROPDEPENDENTCOLUMN_FIELD_NUMBER = 4; + private boolean dropDependentColumn_; + public boolean hasDropDependentColumn() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getDropDependentColumn() { + return dropDependentColumn_; + } + + private void initFields() { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + columnFamily_ = com.google.protobuf.ByteString.EMPTY; + columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + dropDependentColumn_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCompareFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, compareFilter_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, columnFamily_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, columnQualifier_); + } + if 
(((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(4, dropDependentColumn_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, compareFilter_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, columnFamily_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, columnQualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, dropDependentColumn_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj; + + boolean result = true; + result = result && (hasCompareFilter() == other.hasCompareFilter()); + if (hasCompareFilter()) { + result = result && getCompareFilter() + .equals(other.getCompareFilter()); + } + result = result && (hasColumnFamily() == other.hasColumnFamily()); + if (hasColumnFamily()) { + result = result && getColumnFamily() + .equals(other.getColumnFamily()); + } + result = result && (hasColumnQualifier() == other.hasColumnQualifier()); + if (hasColumnQualifier()) { + result = result && getColumnQualifier() + .equals(other.getColumnQualifier()); + } + result = result && (hasDropDependentColumn() == other.hasDropDependentColumn()); + if (hasDropDependentColumn()) { + result = result && (getDropDependentColumn() + == other.getDropDependentColumn()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCompareFilter()) { + hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; + hash = (53 * hash) + getCompareFilter().hashCode(); + } + if (hasColumnFamily()) { + hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamily().hashCode(); + } + if (hasColumnQualifier()) { + hash = (37 * hash) + COLUMNQUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getColumnQualifier().hashCode(); + } + if (hasDropDependentColumn()) { + hash = (37 * hash) + DROPDEPENDENTCOLUMN_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getDropDependentColumn()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCompareFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + columnFamily_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + dropDependentColumn_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (compareFilterBuilder_ == null) { + result.compareFilter_ = compareFilter_; + } else { + result.compareFilter_ = compareFilterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.columnFamily_ = columnFamily_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.columnQualifier_ = columnQualifier_; + if (((from_bitField0_ & 0x00000008) 
== 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.dropDependentColumn_ = dropDependentColumn_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this; + if (other.hasCompareFilter()) { + mergeCompareFilter(other.getCompareFilter()); + } + if (other.hasColumnFamily()) { + setColumnFamily(other.getColumnFamily()); + } + if (other.hasColumnQualifier()) { + setColumnQualifier(other.getColumnQualifier()); + } + if (other.hasDropDependentColumn()) { + setDropDependentColumn(other.getDropDependentColumn()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareFilter()) { + + return false; + } + if (!getCompareFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); + if (hasCompareFilter()) { + subBuilder.mergeFrom(getCompareFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setCompareFilter(subBuilder.buildPartial()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + columnFamily_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + columnQualifier_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + dropDependentColumn_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .CompareFilter compareFilter = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + if (compareFilterBuilder_ == null) { + return compareFilter_; + } else { + return 
compareFilterBuilder_.getMessage(); + } + } + public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + compareFilter_ = value; + onChanged(); + } else { + compareFilterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setCompareFilter( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { + if (compareFilterBuilder_ == null) { + compareFilter_ = builderForValue.build(); + onChanged(); + } else { + compareFilterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { + compareFilter_ = + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); + } else { + compareFilter_ = value; + } + onChanged(); + } else { + compareFilterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearCompareFilter() { + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + onChanged(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCompareFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + if (compareFilterBuilder_ != null) { + return compareFilterBuilder_.getMessageOrBuilder(); + } else { + return compareFilter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> + getCompareFilterFieldBuilder() { + if (compareFilterBuilder_ == null) { + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( + compareFilter_, + getParentForChildren(), + isClean()); + compareFilter_ = null; + } + return compareFilterBuilder_; + } + + // optional bytes columnFamily = 2; + private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasColumnFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnFamily() { + return columnFamily_; + } + public Builder setColumnFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + columnFamily_ = value; + onChanged(); + return this; + } + public Builder clearColumnFamily() { + bitField0_ = 
(bitField0_ & ~0x00000002); + columnFamily_ = getDefaultInstance().getColumnFamily(); + onChanged(); + return this; + } + + // optional bytes columnQualifier = 3; + private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasColumnQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getColumnQualifier() { + return columnQualifier_; + } + public Builder setColumnQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + columnQualifier_ = value; + onChanged(); + return this; + } + public Builder clearColumnQualifier() { + bitField0_ = (bitField0_ & ~0x00000004); + columnQualifier_ = getDefaultInstance().getColumnQualifier(); + onChanged(); + return this; + } + + // optional bool dropDependentColumn = 4; + private boolean dropDependentColumn_ ; + public boolean hasDropDependentColumn() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getDropDependentColumn() { + return dropDependentColumn_; + } + public Builder setDropDependentColumn(boolean value) { + bitField0_ |= 0x00000008; + dropDependentColumn_ = value; + onChanged(); + return this; + } + public Builder clearDropDependentColumn() { + bitField0_ = (bitField0_ & ~0x00000008); + dropDependentColumn_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DependentColumnFilter) + } + + static { + defaultInstance = new DependentColumnFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DependentColumnFilter) + } + + public interface FamilyFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .CompareFilter compareFilter = 1; + boolean hasCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); + } + public static final class FamilyFilter extends + com.google.protobuf.GeneratedMessage + implements FamilyFilterOrBuilder { + // Use FamilyFilter.newBuilder() to construct. 
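+ // --------------------------------------------------------------------
+ // Editorial sketch, not compiler output: assembling the
+ // DependentColumnFilter message completed above. Its required
+ // compareFilter submessage must itself be initialized or build() throws;
+ // the family/qualifier bytes are invented here purely for illustration.
+ //
+ //   import com.google.protobuf.ByteString;
+ //   import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
+ //   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+ //
+ //   FilterProtos.DependentColumnFilter dcf =
+ //       FilterProtos.DependentColumnFilter.newBuilder()
+ //           .setCompareFilter(FilterProtos.CompareFilter.newBuilder()
+ //               .setCompareOp(HBaseProtos.CompareType.LESS))  // builder overload
+ //           .setColumnFamily(ByteString.copyFromUtf8("cf"))
+ //           .setColumnQualifier(ByteString.copyFromUtf8("q"))
+ //           .setDropDependentColumn(true)
+ //           .build();
+ //   assert dcf.isInitialized();
+ // --------------------------------------------------------------------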
+ private FamilyFilter(Builder builder) { + super(builder); + } + private FamilyFilter(boolean noInit) {} + + private static final FamilyFilter defaultInstance; + public static FamilyFilter getDefaultInstance() { + return defaultInstance; + } + + public FamilyFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .CompareFilter compareFilter = 1; + public static final int COMPAREFILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + return compareFilter_; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + return compareFilter_; + } + + private void initFields() { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCompareFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, compareFilter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, compareFilter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj; + + boolean result = true; + result = result && (hasCompareFilter() == other.hasCompareFilter()); + if (hasCompareFilter()) { + result = result && getCompareFilter() + .equals(other.getCompareFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * 
hash) + getDescriptorForType().hashCode(); + if (hasCompareFilter()) { + hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; + hash = (53 * hash) + getCompareFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder 
newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCompareFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (compareFilterBuilder_ == null) { + result.compareFilter_ = compareFilter_; + } else { + result.compareFilter_ = compareFilterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this; + if (other.hasCompareFilter()) { + mergeCompareFilter(other.getCompareFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareFilter()) { + + return false; + } + if (!getCompareFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); + if (hasCompareFilter()) { + subBuilder.mergeFrom(getCompareFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setCompareFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .CompareFilter compareFilter = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + if (compareFilterBuilder_ == null) { + return compareFilter_; + } else { + return compareFilterBuilder_.getMessage(); + } + } + public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + compareFilter_ = value; + onChanged(); + } else { + compareFilterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setCompareFilter( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { + if (compareFilterBuilder_ == null) { + compareFilter_ = builderForValue.build(); + onChanged(); + } else { + compareFilterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 
0x00000001) && + compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { + compareFilter_ = + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); + } else { + compareFilter_ = value; + } + onChanged(); + } else { + compareFilterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearCompareFilter() { + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + onChanged(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCompareFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + if (compareFilterBuilder_ != null) { + return compareFilterBuilder_.getMessageOrBuilder(); + } else { + return compareFilter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> + getCompareFilterFieldBuilder() { + if (compareFilterBuilder_ == null) { + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( + compareFilter_, + getParentForChildren(), + isClean()); + compareFilter_ = null; + } + return compareFilterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:FamilyFilter) + } + + static { + defaultInstance = new FamilyFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FamilyFilter) + } + + public interface FilterListOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .FilterList.Operator operator = 1; + boolean hasOperator(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator(); + + // repeated .Filter filters = 2; + java.util.List + getFiltersList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index); + int getFiltersCount(); + java.util.List + getFiltersOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( + int index); + } + public static final class FilterList extends + com.google.protobuf.GeneratedMessage + implements FilterListOrBuilder { + // Use FilterList.newBuilder() to construct. 
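+ // --------------------------------------------------------------------
+ // Editorial sketch, not compiler output: FamilyFilter, completed above,
+ // is a thin wrapper around one required CompareFilter, so the part worth
+ // showing is the generated stream framing. The in-memory streams below
+ // stand in for whatever transport actually carries the message.
+ //
+ //   import java.io.ByteArrayInputStream;
+ //   import java.io.ByteArrayOutputStream;
+ //   import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
+ //   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+ //
+ //   FilterProtos.FamilyFilter ff = FilterProtos.FamilyFilter.newBuilder()
+ //       .setCompareFilter(FilterProtos.CompareFilter.newBuilder()
+ //           .setCompareOp(HBaseProtos.CompareType.LESS))
+ //       .build();
+ //   ByteArrayOutputStream out = new ByteArrayOutputStream();
+ //   ff.writeDelimitedTo(out);                       // length-prefixed frame
+ //   FilterProtos.FamilyFilter back = FilterProtos.FamilyFilter
+ //       .parseDelimitedFrom(new ByteArrayInputStream(out.toByteArray()));
+ //   // parseDelimitedFrom returns null once the stream is exhausted.
+ // --------------------------------------------------------------------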
+ private FilterList(Builder builder) { + super(builder); + } + private FilterList(boolean noInit) {} + + private static final FilterList defaultInstance; + public static FilterList getDefaultInstance() { + return defaultInstance; + } + + public FilterList getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable; + } + + public enum Operator + implements com.google.protobuf.ProtocolMessageEnum { + MUST_PASS_ALL(0, 1), + MUST_PASS_ONE(1, 2), + ; + + public static final int MUST_PASS_ALL_VALUE = 1; + public static final int MUST_PASS_ONE_VALUE = 2; + + + public final int getNumber() { return value; } + + public static Operator valueOf(int value) { + switch (value) { + case 1: return MUST_PASS_ALL; + case 2: return MUST_PASS_ONE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Operator findValueByNumber(int number) { + return Operator.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0); + } + + private static final Operator[] VALUES = { + MUST_PASS_ALL, MUST_PASS_ONE, + }; + + public static Operator valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private Operator(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:FilterList.Operator) + } + + private int bitField0_; + // required .FilterList.Operator operator = 1; + public static final int OPERATOR_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_; + public boolean hasOperator() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { + return operator_; + } + + // repeated .Filter filters = 2; + public static final int FILTERS_FIELD_NUMBER = 2; + private java.util.List filters_; + public java.util.List getFiltersList() { + return filters_; + } + public java.util.List + getFiltersOrBuilderList() { + return filters_; + } + public int getFiltersCount() { + return filters_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index) { + return filters_.get(index); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( + int index) { + return filters_.get(index); + } + + private void initFields() { + operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + filters_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasOperator()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getFiltersCount(); i++) { + if (!getFilters(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, operator_.getNumber()); + } + for (int i = 0; i < filters_.size(); i++) { + output.writeMessage(2, filters_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, operator_.getNumber()); + } + for (int i = 0; i < filters_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, filters_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj; + + boolean result = true; + result = result && (hasOperator() == other.hasOperator()); + if (hasOperator()) { + result = result && + (getOperator() == other.getOperator()); + } + result = result && getFiltersList() + .equals(other.getFiltersList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasOperator()) { + hash = (37 * hash) + OPERATOR_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getOperator()); + } + if (getFiltersCount() > 0) { + hash = (37 * hash) + FILTERS_FIELD_NUMBER; + hash = (53 * hash) + getFiltersList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable; + } + + // 
Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFiltersFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + bitField0_ = (bitField0_ & ~0x00000001); + if (filtersBuilder_ == null) { + filters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + filtersBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.operator_ = operator_; + if (filtersBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + filters_ = java.util.Collections.unmodifiableList(filters_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.filters_ = filters_; + } else { + result.filters_ = filtersBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this; + if (other.hasOperator()) { + setOperator(other.getOperator()); + } + if (filtersBuilder_ == null) { + if (!other.filters_.isEmpty()) { + if (filters_.isEmpty()) { + filters_ = other.filters_; + bitField0_ = 
(bitField0_ & ~0x00000002); + } else { + ensureFiltersIsMutable(); + filters_.addAll(other.filters_); + } + onChanged(); + } + } else { + if (!other.filters_.isEmpty()) { + if (filtersBuilder_.isEmpty()) { + filtersBuilder_.dispose(); + filtersBuilder_ = null; + filters_ = other.filters_; + bitField0_ = (bitField0_ & ~0x00000002); + filtersBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFiltersFieldBuilder() : null; + } else { + filtersBuilder_.addAllMessages(other.filters_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasOperator()) { + + return false; + } + for (int i = 0; i < getFiltersCount(); i++) { + if (!getFilters(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + operator_ = value; + } + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFilters(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .FilterList.Operator operator = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + public boolean hasOperator() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { + return operator_; + } + public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + operator_ = value; + onChanged(); + return this; + } + public Builder clearOperator() { + bitField0_ = (bitField0_ & ~0x00000001); + operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + onChanged(); + return this; + } + + // repeated .Filter filters = 2; + private java.util.List filters_ = + java.util.Collections.emptyList(); + private void ensureFiltersIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + filters_ = new java.util.ArrayList(filters_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filtersBuilder_; + + public java.util.List getFiltersList() { + if (filtersBuilder_ == null) { + return java.util.Collections.unmodifiableList(filters_); + } else { + return filtersBuilder_.getMessageList(); + } + } + public int getFiltersCount() { + if (filtersBuilder_ == null) { + return filters_.size(); + } else { + return filtersBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index) { + if (filtersBuilder_ == null) { + return filters_.get(index); + } else { + return filtersBuilder_.getMessage(index); + } + } + public Builder setFilters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filtersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFiltersIsMutable(); + filters_.set(index, value); + onChanged(); + } else { + filtersBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFilters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.set(index, builderForValue.build()); + onChanged(); + } else { + filtersBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filtersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFiltersIsMutable(); + filters_.add(value); + onChanged(); + } else { + filtersBuilder_.addMessage(value); + } + return this; + } + public Builder addFilters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filtersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFiltersIsMutable(); + filters_.add(index, value); + onChanged(); + } else { + filtersBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFilters( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.add(builderForValue.build()); + onChanged(); + } else { + filtersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFilters( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.add(index, builderForValue.build()); + onChanged(); + } else { + filtersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFilters( + java.lang.Iterable values) { + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + super.addAll(values, filters_); + onChanged(); + } else { + filtersBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFilters() { + if (filtersBuilder_ == null) { + filters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + filtersBuilder_.clear(); + } + return this; + } + public Builder removeFilters(int index) { + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.remove(index); + onChanged(); + } else { + filtersBuilder_.remove(index); + } + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFiltersBuilder( + int index) { + return getFiltersFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( + int index) { + if (filtersBuilder_ == null) { + return filters_.get(index); + } else { + return filtersBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getFiltersOrBuilderList() { + if (filtersBuilder_ != null) { + return filtersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(filters_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder addFiltersBuilder() { + return getFiltersFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder addFiltersBuilder( + int index) { + return getFiltersFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()); + } + public java.util.List + getFiltersBuilderList() { + return getFiltersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> + getFiltersFieldBuilder() { + if (filtersBuilder_ == null) { + filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( + filters_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + filters_ = null; + } + return filtersBuilder_; + } + + // @@protoc_insertion_point(builder_scope:FilterList) + } + + static { + defaultInstance = new FilterList(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FilterList) + } + + public interface FilterWrapperOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .Filter filter = 1; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); + } + public static final class FilterWrapper extends + com.google.protobuf.GeneratedMessage + implements FilterWrapperOrBuilder { + // Use FilterWrapper.newBuilder() to construct. 
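+ // Illustrative usage sketch, not protoc output: FilterWrapper carries a
+ // single required Filter. Assuming a pre-built HBaseProtos.Filter message
+ // (hypothetical local innerFilterProto) and open streams out/in:
+ //
+ //   FilterProtos.FilterWrapper wrapper =
+ //       FilterProtos.FilterWrapper.newBuilder()
+ //           .setFilter(innerFilterProto)
+ //           .build();                    // throws if the required field is unset
+ //   wrapper.writeDelimitedTo(out);       // length-prefixed write
+ //   FilterProtos.FilterWrapper readBack =
+ //       FilterProtos.FilterWrapper.parseDelimitedFrom(in);  // matching read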
+ private FilterWrapper(Builder builder) { + super(builder); + } + private FilterWrapper(boolean noInit) {} + + private static final FilterWrapper defaultInstance; + public static FilterWrapper getDefaultInstance() { + return defaultInstance; + } + + public FilterWrapper getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable; + } + + private int bitField0_; + // required .Filter filter = 1; + public static final int FILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + return filter_; + } + + private void initFields() { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, filter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, filter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj; + + boolean result = true; + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); 
+ } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder 
extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this; + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFilter()) { + + return false; + } + if (!getFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .Filter filter = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + 
onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:FilterWrapper) + } + + static { + defaultInstance = new FilterWrapper(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FilterWrapper) + } + + public interface FirstKeyOnlyFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class FirstKeyOnlyFilter extends + com.google.protobuf.GeneratedMessage + implements FirstKeyOnlyFilterOrBuilder { + // Use FirstKeyOnlyFilter.newBuilder() to construct. + private FirstKeyOnlyFilter(Builder builder) { + super(builder); + } + private FirstKeyOnlyFilter(boolean noInit) {} + + private static final FirstKeyOnlyFilter defaultInstance; + public static FirstKeyOnlyFilter getDefaultInstance() { + return defaultInstance; + } + + public FirstKeyOnlyFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + 
.buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other); + } else { + super.mergeFrom(other); + return 
this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:FirstKeyOnlyFilter) + } + + static { + defaultInstance = new FirstKeyOnlyFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FirstKeyOnlyFilter) + } + + public interface FirstKeyValueMatchingQualifiersFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes qualifiers = 1; + java.util.List getQualifiersList(); + int getQualifiersCount(); + com.google.protobuf.ByteString getQualifiers(int index); + } + public static final class FirstKeyValueMatchingQualifiersFilter extends + com.google.protobuf.GeneratedMessage + implements FirstKeyValueMatchingQualifiersFilterOrBuilder { + // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct. 
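+ // Illustrative usage sketch, not protoc output: this message holds only a
+ // repeated bytes field of column qualifiers, so it has no required state.
+ //
+ //   FilterProtos.FirstKeyValueMatchingQualifiersFilter proto =
+ //       FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder()
+ //           .addQualifiers(com.google.protobuf.ByteString.copyFromUtf8("q1"))
+ //           .addQualifiers(com.google.protobuf.ByteString.copyFromUtf8("q2"))
+ //           .build();
+ //   // Even the default instance is valid, since no field is required:
+ //   boolean ok = FilterProtos.FirstKeyValueMatchingQualifiersFilter
+ //       .getDefaultInstance().isInitialized();   // true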
+ private FirstKeyValueMatchingQualifiersFilter(Builder builder) { + super(builder); + } + private FirstKeyValueMatchingQualifiersFilter(boolean noInit) {} + + private static final FirstKeyValueMatchingQualifiersFilter defaultInstance; + public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { + return defaultInstance; + } + + public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; + } + + // repeated bytes qualifiers = 1; + public static final int QUALIFIERS_FIELD_NUMBER = 1; + private java.util.List qualifiers_; + public java.util.List + getQualifiersList() { + return qualifiers_; + } + public int getQualifiersCount() { + return qualifiers_.size(); + } + public com.google.protobuf.ByteString getQualifiers(int index) { + return qualifiers_.get(index); + } + + private void initFields() { + qualifiers_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < qualifiers_.size(); i++) { + output.writeBytes(1, qualifiers_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < qualifiers_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(qualifiers_.get(i)); + } + size += dataSize; + size += 1 * getQualifiersList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj; + + boolean result = true; + result = result && getQualifiersList() + .equals(other.getQualifiersList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getQualifiersCount() > 0) { + hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER; + hash = (53 * hash) + getQualifiersList().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return 
newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + qualifiers_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.qualifiers_ = qualifiers_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if 
(other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this; + if (!other.qualifiers_.isEmpty()) { + if (qualifiers_.isEmpty()) { + qualifiers_ = other.qualifiers_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureQualifiersIsMutable(); + qualifiers_.addAll(other.qualifiers_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureQualifiersIsMutable(); + qualifiers_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes qualifiers = 1; + private java.util.List qualifiers_ = java.util.Collections.emptyList();; + private void ensureQualifiersIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + qualifiers_ = new java.util.ArrayList(qualifiers_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getQualifiersList() { + return java.util.Collections.unmodifiableList(qualifiers_); + } + public int getQualifiersCount() { + return qualifiers_.size(); + } + public com.google.protobuf.ByteString getQualifiers(int index) { + return qualifiers_.get(index); + } + public Builder setQualifiers( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifiersIsMutable(); + qualifiers_.set(index, value); + onChanged(); + return this; + } + public Builder addQualifiers(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifiersIsMutable(); + qualifiers_.add(value); + onChanged(); + return this; + } + public Builder addAllQualifiers( + java.lang.Iterable values) { + ensureQualifiersIsMutable(); + super.addAll(values, qualifiers_); + onChanged(); + return this; + } + public Builder clearQualifiers() { + qualifiers_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FirstKeyValueMatchingQualifiersFilter) + } + + static { + defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FirstKeyValueMatchingQualifiersFilter) + } + + public interface FuzzyRowFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .BytesBytesPair 
fuzzyKeysData = 1; + java.util.List + getFuzzyKeysDataList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index); + int getFuzzyKeysDataCount(); + java.util.List + getFuzzyKeysDataOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( + int index); + } + public static final class FuzzyRowFilter extends + com.google.protobuf.GeneratedMessage + implements FuzzyRowFilterOrBuilder { + // Use FuzzyRowFilter.newBuilder() to construct. + private FuzzyRowFilter(Builder builder) { + super(builder); + } + private FuzzyRowFilter(boolean noInit) {} + + private static final FuzzyRowFilter defaultInstance; + public static FuzzyRowFilter getDefaultInstance() { + return defaultInstance; + } + + public FuzzyRowFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable; + } + + // repeated .BytesBytesPair fuzzyKeysData = 1; + public static final int FUZZYKEYSDATA_FIELD_NUMBER = 1; + private java.util.List fuzzyKeysData_; + public java.util.List getFuzzyKeysDataList() { + return fuzzyKeysData_; + } + public java.util.List + getFuzzyKeysDataOrBuilderList() { + return fuzzyKeysData_; + } + public int getFuzzyKeysDataCount() { + return fuzzyKeysData_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { + return fuzzyKeysData_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( + int index) { + return fuzzyKeysData_.get(index); + } + + private void initFields() { + fuzzyKeysData_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getFuzzyKeysDataCount(); i++) { + if (!getFuzzyKeysData(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < fuzzyKeysData_.size(); i++) { + output.writeMessage(1, fuzzyKeysData_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < fuzzyKeysData_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, fuzzyKeysData_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj; + + boolean result = true; + result = result && getFuzzyKeysDataList() + .equals(other.getFuzzyKeysDataList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFuzzyKeysDataCount() > 0) { + hash = (37 * hash) + FUZZYKEYSDATA_FIELD_NUMBER; + hash = (53 * hash) + getFuzzyKeysDataList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + com.google.protobuf.CodedInputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFuzzyKeysDataFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (fuzzyKeysDataBuilder_ == null) { + fuzzyKeysData_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + fuzzyKeysDataBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this); + int from_bitField0_ = 
bitField0_; + if (fuzzyKeysDataBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.fuzzyKeysData_ = fuzzyKeysData_; + } else { + result.fuzzyKeysData_ = fuzzyKeysDataBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this; + if (fuzzyKeysDataBuilder_ == null) { + if (!other.fuzzyKeysData_.isEmpty()) { + if (fuzzyKeysData_.isEmpty()) { + fuzzyKeysData_ = other.fuzzyKeysData_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.addAll(other.fuzzyKeysData_); + } + onChanged(); + } + } else { + if (!other.fuzzyKeysData_.isEmpty()) { + if (fuzzyKeysDataBuilder_.isEmpty()) { + fuzzyKeysDataBuilder_.dispose(); + fuzzyKeysDataBuilder_ = null; + fuzzyKeysData_ = other.fuzzyKeysData_; + bitField0_ = (bitField0_ & ~0x00000001); + fuzzyKeysDataBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFuzzyKeysDataFieldBuilder() : null; + } else { + fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getFuzzyKeysDataCount(); i++) { + if (!getFuzzyKeysData(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFuzzyKeysData(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .BytesBytesPair fuzzyKeysData = 1; + private java.util.List fuzzyKeysData_ = + java.util.Collections.emptyList(); + private void ensureFuzzyKeysDataIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + fuzzyKeysData_ = new java.util.ArrayList(fuzzyKeysData_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_; + + public java.util.List getFuzzyKeysDataList() { + if (fuzzyKeysDataBuilder_ == null) { + return java.util.Collections.unmodifiableList(fuzzyKeysData_); + } else { + return fuzzyKeysDataBuilder_.getMessageList(); + } + } + public int getFuzzyKeysDataCount() { + if (fuzzyKeysDataBuilder_ == null) { + return fuzzyKeysData_.size(); + } else { + return fuzzyKeysDataBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { + if (fuzzyKeysDataBuilder_ == null) { + return fuzzyKeysData_.get(index); + } else { + return fuzzyKeysDataBuilder_.getMessage(index); + } + } + public Builder setFuzzyKeysData( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { + if (fuzzyKeysDataBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.set(index, value); + onChanged(); + } else { + fuzzyKeysDataBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFuzzyKeysData( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { + if (fuzzyKeysDataBuilder_ == null) { + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.set(index, builderForValue.build()); + onChanged(); + } else { + fuzzyKeysDataBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { + if (fuzzyKeysDataBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.add(value); + onChanged(); + } else { + fuzzyKeysDataBuilder_.addMessage(value); + } + return this; + } + public Builder addFuzzyKeysData( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { + if (fuzzyKeysDataBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.add(index, value); + onChanged(); + } else { + fuzzyKeysDataBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFuzzyKeysData( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { + if (fuzzyKeysDataBuilder_ == null) { + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.add(builderForValue.build()); + onChanged(); + } else { + fuzzyKeysDataBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFuzzyKeysData( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { + if (fuzzyKeysDataBuilder_ == null) { + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.add(index, builderForValue.build()); + onChanged(); + } else { + fuzzyKeysDataBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFuzzyKeysData( + java.lang.Iterable values) { + if (fuzzyKeysDataBuilder_ == null) { + ensureFuzzyKeysDataIsMutable(); + super.addAll(values, fuzzyKeysData_); + onChanged(); + } else { + fuzzyKeysDataBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFuzzyKeysData() { + if (fuzzyKeysDataBuilder_ == null) { + fuzzyKeysData_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + 
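// NOTE (editorial, not protoc output): once the nested RepeatedFieldBuilder has been
// materialized it owns the field's state, so clearing is delegated to it rather than
// resetting the local fuzzyKeysData_ list.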
fuzzyKeysDataBuilder_.clear(); + } + return this; + } + public Builder removeFuzzyKeysData(int index) { + if (fuzzyKeysDataBuilder_ == null) { + ensureFuzzyKeysDataIsMutable(); + fuzzyKeysData_.remove(index); + onChanged(); + } else { + fuzzyKeysDataBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder( + int index) { + return getFuzzyKeysDataFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( + int index) { + if (fuzzyKeysDataBuilder_ == null) { + return fuzzyKeysData_.get(index); } else { + return fuzzyKeysDataBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getFuzzyKeysDataOrBuilderList() { + if (fuzzyKeysDataBuilder_ != null) { + return fuzzyKeysDataBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(fuzzyKeysData_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() { + return getFuzzyKeysDataFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder( + int index) { + return getFuzzyKeysDataFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); + } + public java.util.List + getFuzzyKeysDataBuilderList() { + return getFuzzyKeysDataFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> + getFuzzyKeysDataFieldBuilder() { + if (fuzzyKeysDataBuilder_ == null) { + fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( + fuzzyKeysData_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + fuzzyKeysData_ = null; + } + return fuzzyKeysDataBuilder_; + } + + // @@protoc_insertion_point(builder_scope:FuzzyRowFilter) + } + + static { + defaultInstance = new FuzzyRowFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FuzzyRowFilter) + } + + public interface InclusiveStopFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes stopRowKey = 1; + boolean hasStopRowKey(); + com.google.protobuf.ByteString getStopRowKey(); + } + public static final class InclusiveStopFilter extends + com.google.protobuf.GeneratedMessage + implements InclusiveStopFilterOrBuilder { + // Use InclusiveStopFilter.newBuilder() to construct. 
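// Editor's sketch, not part of the generated file: a minimal round-trip through
// this message's builder and the static parseFrom(byte[]) declared further down;
// the row-key literal is illustrative only.
//
//   InclusiveStopFilter proto = InclusiveStopFilter.newBuilder()
//       .setStopRowKey(com.google.protobuf.ByteString.copyFromUtf8("row-0099"))
//       .build();
//   byte[] wire = proto.toByteArray();                      // serialize
//   InclusiveStopFilter copy = InclusiveStopFilter.parseFrom(wire);
//   assert copy.hasStopRowKey() && copy.getStopRowKey().equals(proto.getStopRowKey());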
+ private InclusiveStopFilter(Builder builder) { + super(builder); + } + private InclusiveStopFilter(boolean noInit) {} + + private static final InclusiveStopFilter defaultInstance; + public static InclusiveStopFilter getDefaultInstance() { + return defaultInstance; + } + + public InclusiveStopFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes stopRowKey = 1; + public static final int STOPROWKEY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString stopRowKey_; + public boolean hasStopRowKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getStopRowKey() { + return stopRowKey_; + } + + private void initFields() { + stopRowKey_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, stopRowKey_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, stopRowKey_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj; + + boolean result = true; + result = result && (hasStopRowKey() == other.hasStopRowKey()); + if (hasStopRowKey()) { + result = result && getStopRowKey() + .equals(other.getStopRowKey()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasStopRowKey()) { + hash = (37 * hash) + STOPROWKEY_FIELD_NUMBER; + hash = (53 * hash) + getStopRowKey().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder { + public 
static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + stopRowKey_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.stopRowKey_ = stopRowKey_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this; + if (other.hasStopRowKey()) { + setStopRowKey(other.getStopRowKey()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean 
isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + stopRowKey_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes stopRowKey = 1; + private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStopRowKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getStopRowKey() { + return stopRowKey_; + } + public Builder setStopRowKey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + stopRowKey_ = value; + onChanged(); + return this; + } + public Builder clearStopRowKey() { + bitField0_ = (bitField0_ & ~0x00000001); + stopRowKey_ = getDefaultInstance().getStopRowKey(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:InclusiveStopFilter) + } + + static { + defaultInstance = new InclusiveStopFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:InclusiveStopFilter) + } + + public interface KeyOnlyFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool lenAsVal = 1; + boolean hasLenAsVal(); + boolean getLenAsVal(); + } + public static final class KeyOnlyFilter extends + com.google.protobuf.GeneratedMessage + implements KeyOnlyFilterOrBuilder { + // Use KeyOnlyFilter.newBuilder() to construct. 
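// Editor's sketch, not part of the generated file: lenAsVal is declared
// "required", so calling build() on an empty builder throws an
// UninitializedMessageException; set it before building (the value here is
// illustrative).
//
//   KeyOnlyFilter proto = KeyOnlyFilter.newBuilder()
//       .setLenAsVal(true)
//       .build();
//   assert proto.hasLenAsVal();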
+ private KeyOnlyFilter(Builder builder) { + super(builder); + } + private KeyOnlyFilter(boolean noInit) {} + + private static final KeyOnlyFilter defaultInstance; + public static KeyOnlyFilter getDefaultInstance() { + return defaultInstance; + } + + public KeyOnlyFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable; + } + + private int bitField0_; + // required bool lenAsVal = 1; + public static final int LENASVAL_FIELD_NUMBER = 1; + private boolean lenAsVal_; + public boolean hasLenAsVal() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLenAsVal() { + return lenAsVal_; + } + + private void initFields() { + lenAsVal_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLenAsVal()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, lenAsVal_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, lenAsVal_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj; + + boolean result = true; + result = result && (hasLenAsVal() == other.hasLenAsVal()); + if (hasLenAsVal()) { + result = result && (getLenAsVal() + == other.getLenAsVal()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLenAsVal()) { + hash = (37 * hash) + LENASVAL_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLenAsVal()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; + } + + 
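// NOTE (editorial, not protoc output): the accessor table returned below backs
// the reflective Message API (getField/setField/hasField) for this builder's
// fields.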
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lenAsVal_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lenAsVal_ = lenAsVal_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this; + if (other.hasLenAsVal()) { + setLenAsVal(other.getLenAsVal()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLenAsVal()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + 
com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lenAsVal_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool lenAsVal = 1; + private boolean lenAsVal_ ; + public boolean hasLenAsVal() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLenAsVal() { + return lenAsVal_; + } + public Builder setLenAsVal(boolean value) { + bitField0_ |= 0x00000001; + lenAsVal_ = value; + onChanged(); + return this; + } + public Builder clearLenAsVal() { + bitField0_ = (bitField0_ & ~0x00000001); + lenAsVal_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:KeyOnlyFilter) + } + + static { + defaultInstance = new KeyOnlyFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:KeyOnlyFilter) + } + + public interface MultipleColumnPrefixFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes sortedPrefixes = 1; + java.util.List getSortedPrefixesList(); + int getSortedPrefixesCount(); + com.google.protobuf.ByteString getSortedPrefixes(int index); + } + public static final class MultipleColumnPrefixFilter extends + com.google.protobuf.GeneratedMessage + implements MultipleColumnPrefixFilterOrBuilder { + // Use MultipleColumnPrefixFilter.newBuilder() to construct. + private MultipleColumnPrefixFilter(Builder builder) { + super(builder); + } + private MultipleColumnPrefixFilter(boolean noInit) {} + + private static final MultipleColumnPrefixFilter defaultInstance; + public static MultipleColumnPrefixFilter getDefaultInstance() { + return defaultInstance; + } + + public MultipleColumnPrefixFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; + } + + // repeated bytes sortedPrefixes = 1; + public static final int SORTEDPREFIXES_FIELD_NUMBER = 1; + private java.util.List sortedPrefixes_; + public java.util.List + getSortedPrefixesList() { + return sortedPrefixes_; + } + public int getSortedPrefixesCount() { + return sortedPrefixes_.size(); + } + public com.google.protobuf.ByteString getSortedPrefixes(int index) { + return sortedPrefixes_.get(index); + } + + private void initFields() { + sortedPrefixes_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < sortedPrefixes_.size(); i++) { + output.writeBytes(1, sortedPrefixes_.get(i)); + } + 
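// NOTE (editorial, not protoc output): unknown fields captured when this
// message was parsed are re-emitted here, so fields added by newer versions
// of Filter.proto survive a parse/serialize round trip.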
getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < sortedPrefixes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(sortedPrefixes_.get(i)); + } + size += dataSize; + size += 1 * getSortedPrefixesList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj; + + boolean result = true; + result = result && getSortedPrefixesList() + .equals(other.getSortedPrefixesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getSortedPrefixesCount() > 0) { + hash = (37 * hash) + SORTEDPREFIXES_FIELD_NUMBER; + hash = (53 * hash) + getSortedPrefixesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + sortedPrefixes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.sortedPrefixes_ = sortedPrefixes_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance()) return this; + if (!other.sortedPrefixes_.isEmpty()) { + if (sortedPrefixes_.isEmpty()) { + sortedPrefixes_ = other.sortedPrefixes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureSortedPrefixesIsMutable(); + sortedPrefixes_.addAll(other.sortedPrefixes_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureSortedPrefixesIsMutable(); + sortedPrefixes_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes sortedPrefixes = 1; + private java.util.List sortedPrefixes_ = java.util.Collections.emptyList();; + private void ensureSortedPrefixesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + sortedPrefixes_ = new java.util.ArrayList(sortedPrefixes_); + bitField0_ |= 
0x00000001; + } + } + public java.util.List + getSortedPrefixesList() { + return java.util.Collections.unmodifiableList(sortedPrefixes_); + } + public int getSortedPrefixesCount() { + return sortedPrefixes_.size(); + } + public com.google.protobuf.ByteString getSortedPrefixes(int index) { + return sortedPrefixes_.get(index); + } + public Builder setSortedPrefixes( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSortedPrefixesIsMutable(); + sortedPrefixes_.set(index, value); + onChanged(); + return this; + } + public Builder addSortedPrefixes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSortedPrefixesIsMutable(); + sortedPrefixes_.add(value); + onChanged(); + return this; + } + public Builder addAllSortedPrefixes( + java.lang.Iterable values) { + ensureSortedPrefixesIsMutable(); + super.addAll(values, sortedPrefixes_); + onChanged(); + return this; + } + public Builder clearSortedPrefixes() { + sortedPrefixes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MultipleColumnPrefixFilter) + } + + static { + defaultInstance = new MultipleColumnPrefixFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultipleColumnPrefixFilter) + } + + public interface PageFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int64 pageSize = 1; + boolean hasPageSize(); + long getPageSize(); + } + public static final class PageFilter extends + com.google.protobuf.GeneratedMessage + implements PageFilterOrBuilder { + // Use PageFilter.newBuilder() to construct. 
+ private PageFilter(Builder builder) { + super(builder); + } + private PageFilter(boolean noInit) {} + + private static final PageFilter defaultInstance; + public static PageFilter getDefaultInstance() { + return defaultInstance; + } + + public PageFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable; + } + + private int bitField0_; + // required int64 pageSize = 1; + public static final int PAGESIZE_FIELD_NUMBER = 1; + private long pageSize_; + public boolean hasPageSize() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getPageSize() { + return pageSize_; + } + + private void initFields() { + pageSize_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPageSize()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, pageSize_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, pageSize_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) obj; + + boolean result = true; + result = result && (hasPageSize() == other.hasPageSize()); + if (hasPageSize()) { + result = result && (getPageSize() + == other.getPageSize()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPageSize()) { + hash = (37 * hash) + PAGESIZE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getPageSize()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + pageSize_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.pageSize_ = pageSize_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance()) return this; + if (other.hasPageSize()) { + setPageSize(other.getPageSize()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPageSize()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while 
(true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + pageSize_ = input.readInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required int64 pageSize = 1; + private long pageSize_ ; + public boolean hasPageSize() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getPageSize() { + return pageSize_; + } + public Builder setPageSize(long value) { + bitField0_ |= 0x00000001; + pageSize_ = value; + onChanged(); + return this; + } + public Builder clearPageSize() { + bitField0_ = (bitField0_ & ~0x00000001); + pageSize_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:PageFilter) + } + + static { + defaultInstance = new PageFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:PageFilter) + } + + public interface PrefixFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes prefix = 1; + boolean hasPrefix(); + com.google.protobuf.ByteString getPrefix(); + } + public static final class PrefixFilter extends + com.google.protobuf.GeneratedMessage + implements PrefixFilterOrBuilder { + // Use PrefixFilter.newBuilder() to construct. + private PrefixFilter(Builder builder) { + super(builder); + } + private PrefixFilter(boolean noInit) {} + + private static final PrefixFilter defaultInstance; + public static PrefixFilter getDefaultInstance() { + return defaultInstance; + } + + public PrefixFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes prefix = 1; + public static final int PREFIX_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString prefix_; + public boolean hasPrefix() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getPrefix() { + return prefix_; + } + + private void initFields() { + prefix_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, prefix_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, prefix_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + 
private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) obj; + + boolean result = true; + result = result && (hasPrefix() == other.hasPrefix()); + if (hasPrefix()) { + result = result && getPrefix() + .equals(other.getPrefix()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPrefix()) { + hash = (37 * hash) + PREFIX_FIELD_NUMBER; + hash = (53 * hash) + getPrefix().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + prefix_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter 
buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prefix_ = prefix_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance()) return this; + if (other.hasPrefix()) { + setPrefix(other.getPrefix()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + prefix_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes prefix = 1; + private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasPrefix() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getPrefix() { + return prefix_; + } + public Builder setPrefix(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + prefix_ = value; + onChanged(); + return this; + } + public Builder clearPrefix() { + bitField0_ = (bitField0_ & ~0x00000001); + prefix_ = getDefaultInstance().getPrefix(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:PrefixFilter) + } + + static { + defaultInstance = new PrefixFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:PrefixFilter) + } + + public interface QualifierFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .CompareFilter compareFilter = 1; + boolean hasCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); + } + public static final class QualifierFilter extends + com.google.protobuf.GeneratedMessage + implements QualifierFilterOrBuilder { + // Use QualifierFilter.newBuilder() to construct. 
+ private QualifierFilter(Builder builder) { + super(builder); + } + private QualifierFilter(boolean noInit) {} + + private static final QualifierFilter defaultInstance; + public static QualifierFilter getDefaultInstance() { + return defaultInstance; + } + + public QualifierFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .CompareFilter compareFilter = 1; + public static final int COMPAREFILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + return compareFilter_; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + return compareFilter_; + } + + private void initFields() { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCompareFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, compareFilter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, compareFilter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) obj; + + boolean result = true; + result = result && (hasCompareFilter() == other.hasCompareFilter()); + if (hasCompareFilter()) { + result = result && getCompareFilter() + .equals(other.getCompareFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + 
int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCompareFilter()) { + hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; + hash = (53 * hash) + getCompareFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return 
newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCompareFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (compareFilterBuilder_ == null) { + result.compareFilter_ = compareFilter_; + } else { + result.compareFilter_ = compareFilterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance()) return this; + if (other.hasCompareFilter()) { + mergeCompareFilter(other.getCompareFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareFilter()) { + + return false; + } + if (!getCompareFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); + if (hasCompareFilter()) { + subBuilder.mergeFrom(getCompareFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setCompareFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .CompareFilter compareFilter = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + if (compareFilterBuilder_ == null) { + return compareFilter_; + } else { + return compareFilterBuilder_.getMessage(); + } + } + public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + compareFilter_ = value; + onChanged(); + } else { + compareFilterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setCompareFilter( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { + if (compareFilterBuilder_ == null) { + compareFilter_ = builderForValue.build(); + onChanged(); + } else { + compareFilterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder 
mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { + compareFilter_ = + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); + } else { + compareFilter_ = value; + } + onChanged(); + } else { + compareFilterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearCompareFilter() { + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + onChanged(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCompareFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + if (compareFilterBuilder_ != null) { + return compareFilterBuilder_.getMessageOrBuilder(); + } else { + return compareFilter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> + getCompareFilterFieldBuilder() { + if (compareFilterBuilder_ == null) { + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( + compareFilter_, + getParentForChildren(), + isClean()); + compareFilter_ = null; + } + return compareFilterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:QualifierFilter) + } + + static { + defaultInstance = new QualifierFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:QualifierFilter) + } + + public interface RandomRowFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required float chance = 1; + boolean hasChance(); + float getChance(); + } + public static final class RandomRowFilter extends + com.google.protobuf.GeneratedMessage + implements RandomRowFilterOrBuilder { + // Use RandomRowFilter.newBuilder() to construct. 
+ private RandomRowFilter(Builder builder) { + super(builder); + } + private RandomRowFilter(boolean noInit) {} + + private static final RandomRowFilter defaultInstance; + public static RandomRowFilter getDefaultInstance() { + return defaultInstance; + } + + public RandomRowFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable; + } + + private int bitField0_; + // required float chance = 1; + public static final int CHANCE_FIELD_NUMBER = 1; + private float chance_; + public boolean hasChance() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public float getChance() { + return chance_; + } + + private void initFields() { + chance_ = 0F; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasChance()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeFloat(1, chance_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeFloatSize(1, chance_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) obj; + + boolean result = true; + result = result && (hasChance() == other.hasChance()); + if (hasChance()) { + result = result && (Float.floatToIntBits(getChance()) == Float.floatToIntBits(other.getChance())); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasChance()) { + hash = (37 * hash) + CHANCE_FIELD_NUMBER; + hash = (53 * hash) + Float.floatToIntBits( + getChance()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + 
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + chance_ = 0F; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.chance_ = chance_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance()) return this; + if (other.hasChance()) { + setChance(other.getChance()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasChance()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 13: { + bitField0_ |= 0x00000001; + chance_ = input.readFloat(); + break; + } + } + } + } + + private int bitField0_; + + // required float chance = 1; + private float chance_ ; + public boolean hasChance() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public float getChance() { + return chance_; + } + public Builder setChance(float value) { + bitField0_ |= 0x00000001; + chance_ = value; + onChanged(); + return this; + } + public Builder clearChance() { + bitField0_ = (bitField0_ & ~0x00000001); + chance_ = 0F; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RandomRowFilter) + } + + static { + defaultInstance = new RandomRowFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RandomRowFilter) + } + + public interface RowFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .CompareFilter compareFilter = 1; + boolean hasCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); + } + public static final class RowFilter extends + com.google.protobuf.GeneratedMessage + implements RowFilterOrBuilder { + // Use RowFilter.newBuilder() to construct. 
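+    // A minimal usage sketch: client code assembles this message through the
+    // builder, roughly
+    //   FilterProtos.RowFilter rowFilter = FilterProtos.RowFilter.newBuilder()
+    //       .setCompareFilter(compareFilter) // compareFilter: placeholder for a
+    //                                        // fully initialized CompareFilter
+    //       .build();
+    // build() throws UninitializedMessageException if the required
+    // compareFilter field is unset or itself uninitialized.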
+ private RowFilter(Builder builder) { + super(builder); + } + private RowFilter(boolean noInit) {} + + private static final RowFilter defaultInstance; + public static RowFilter getDefaultInstance() { + return defaultInstance; + } + + public RowFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .CompareFilter compareFilter = 1; + public static final int COMPAREFILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + return compareFilter_; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + return compareFilter_; + } + + private void initFields() { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCompareFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, compareFilter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, compareFilter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) obj; + + boolean result = true; + result = result && (hasCompareFilter() == other.hasCompareFilter()); + if (hasCompareFilter()) { + result = result && getCompareFilter() + .equals(other.getCompareFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasCompareFilter()) { + hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; + hash = (53 * hash) + getCompareFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCompareFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (compareFilterBuilder_ == null) { + result.compareFilter_ = compareFilter_; + } else { + result.compareFilter_ = compareFilterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance()) return this; + if (other.hasCompareFilter()) { + mergeCompareFilter(other.getCompareFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareFilter()) { + + return false; + } + if (!getCompareFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); + if (hasCompareFilter()) { + subBuilder.mergeFrom(getCompareFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setCompareFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .CompareFilter compareFilter = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + if (compareFilterBuilder_ == null) { + return compareFilter_; + } else { + return compareFilterBuilder_.getMessage(); + } + } + public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + compareFilter_ = value; + onChanged(); + } else { + compareFilterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setCompareFilter( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { + if (compareFilterBuilder_ == null) { + compareFilter_ = builderForValue.build(); + onChanged(); + } else { + compareFilterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { + compareFilter_ = + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); + } else { + compareFilter_ = value; + } + onChanged(); + } else { + compareFilterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearCompareFilter() { + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + onChanged(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCompareFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + if (compareFilterBuilder_ != null) { + return compareFilterBuilder_.getMessageOrBuilder(); + } else { + return compareFilter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> + getCompareFilterFieldBuilder() { + if (compareFilterBuilder_ == null) { + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( + compareFilter_, + getParentForChildren(), + isClean()); + compareFilter_ = null; + } + return compareFilterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RowFilter) + } + + static { + defaultInstance = new RowFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowFilter) + } + + public interface SingleColumnValueExcludeFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .SingleColumnValueFilter singleColumnValueFilter = 1; + boolean hasSingleColumnValueFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder(); + } + public static final class SingleColumnValueExcludeFilter extends + com.google.protobuf.GeneratedMessage + implements SingleColumnValueExcludeFilterOrBuilder { + // Use SingleColumnValueExcludeFilter.newBuilder() to construct. 
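+    // A minimal usage sketch: this message wraps a single required
+    // SingleColumnValueFilter, e.g.
+    //   FilterProtos.SingleColumnValueExcludeFilter excludeFilter =
+    //       FilterProtos.SingleColumnValueExcludeFilter.newBuilder()
+    //           .setSingleColumnValueFilter(scvf) // scvf: placeholder for a fully
+    //                                             // initialized SingleColumnValueFilter
+    //           .build();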
+ private SingleColumnValueExcludeFilter(Builder builder) { + super(builder); + } + private SingleColumnValueExcludeFilter(boolean noInit) {} + + private static final SingleColumnValueExcludeFilter defaultInstance; + public static SingleColumnValueExcludeFilter getDefaultInstance() { + return defaultInstance; + } + + public SingleColumnValueExcludeFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .SingleColumnValueFilter singleColumnValueFilter = 1; + public static final int SINGLECOLUMNVALUEFILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_; + public boolean hasSingleColumnValueFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { + return singleColumnValueFilter_; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { + return singleColumnValueFilter_; + } + + private void initFields() { + singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasSingleColumnValueFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getSingleColumnValueFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, singleColumnValueFilter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, singleColumnValueFilter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) obj; + + boolean result = 
true; + result = result && (hasSingleColumnValueFilter() == other.hasSingleColumnValueFilter()); + if (hasSingleColumnValueFilter()) { + result = result && getSingleColumnValueFilter() + .equals(other.getSingleColumnValueFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSingleColumnValueFilter()) { + hash = (37 * hash) + SINGLECOLUMNVALUEFILTER_FIELD_NUMBER; + hash = (53 * hash) + getSingleColumnValueFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getSingleColumnValueFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (singleColumnValueFilterBuilder_ == null) { + singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + } else { + singleColumnValueFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + 
result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (singleColumnValueFilterBuilder_ == null) { + result.singleColumnValueFilter_ = singleColumnValueFilter_; + } else { + result.singleColumnValueFilter_ = singleColumnValueFilterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance()) return this; + if (other.hasSingleColumnValueFilter()) { + mergeSingleColumnValueFilter(other.getSingleColumnValueFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasSingleColumnValueFilter()) { + + return false; + } + if (!getSingleColumnValueFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(); + if (hasSingleColumnValueFilter()) { + subBuilder.mergeFrom(getSingleColumnValueFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setSingleColumnValueFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .SingleColumnValueFilter singleColumnValueFilter = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> 
singleColumnValueFilterBuilder_; + public boolean hasSingleColumnValueFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { + if (singleColumnValueFilterBuilder_ == null) { + return singleColumnValueFilter_; + } else { + return singleColumnValueFilterBuilder_.getMessage(); + } + } + public Builder setSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { + if (singleColumnValueFilterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + singleColumnValueFilter_ = value; + onChanged(); + } else { + singleColumnValueFilterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setSingleColumnValueFilter( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder builderForValue) { + if (singleColumnValueFilterBuilder_ == null) { + singleColumnValueFilter_ = builderForValue.build(); + onChanged(); + } else { + singleColumnValueFilterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { + if (singleColumnValueFilterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + singleColumnValueFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) { + singleColumnValueFilter_ = + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(singleColumnValueFilter_).mergeFrom(value).buildPartial(); + } else { + singleColumnValueFilter_ = value; + } + onChanged(); + } else { + singleColumnValueFilterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearSingleColumnValueFilter() { + if (singleColumnValueFilterBuilder_ == null) { + singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + onChanged(); + } else { + singleColumnValueFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder getSingleColumnValueFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getSingleColumnValueFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { + if (singleColumnValueFilterBuilder_ != null) { + return singleColumnValueFilterBuilder_.getMessageOrBuilder(); + } else { + return singleColumnValueFilter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> + getSingleColumnValueFilterFieldBuilder() { + if (singleColumnValueFilterBuilder_ == null) { + singleColumnValueFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>( + singleColumnValueFilter_, + getParentForChildren(), + isClean()); + singleColumnValueFilter_ = null; + } + return singleColumnValueFilterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:SingleColumnValueExcludeFilter) + } + + static { + defaultInstance = new SingleColumnValueExcludeFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SingleColumnValueExcludeFilter) + } + + public interface SingleColumnValueFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes columnFamily = 1; + boolean hasColumnFamily(); + com.google.protobuf.ByteString getColumnFamily(); + + // optional bytes columnQualifier = 2; + boolean hasColumnQualifier(); + com.google.protobuf.ByteString getColumnQualifier(); + + // required .CompareType compareOp = 3; + boolean hasCompareOp(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); + + // required .Comparator comparator = 4; + boolean hasComparator(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); + + // optional bool foundColumn = 5; + boolean hasFoundColumn(); + boolean getFoundColumn(); + + // optional bool matchedColumn = 6; + boolean hasMatchedColumn(); + boolean getMatchedColumn(); + + // optional bool filterIfMissing = 7; + boolean hasFilterIfMissing(); + boolean getFilterIfMissing(); + + // optional bool latestVersionOnly = 8; + boolean hasLatestVersionOnly(); + boolean getLatestVersionOnly(); + } + public static final class SingleColumnValueFilter extends + com.google.protobuf.GeneratedMessage + implements SingleColumnValueFilterOrBuilder { + // Use SingleColumnValueFilter.newBuilder() to construct. 
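+    // A minimal usage sketch: compareOp and comparator are required, the column
+    // coordinates and bool flags optional, e.g.
+    //   FilterProtos.SingleColumnValueFilter scvf =
+    //       FilterProtos.SingleColumnValueFilter.newBuilder()
+    //           .setColumnFamily(family)       // family/qualifier: placeholder
+    //           .setColumnQualifier(qualifier) // ByteString values
+    //           .setCompareOp(HBaseProtos.CompareType.EQUAL)
+    //           .setComparator(comparator)     // comparator: placeholder for a fully
+    //                                          // initialized ComparatorProtos.Comparator
+    //           .setFilterIfMissing(true)
+    //           .build();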
+ private SingleColumnValueFilter(Builder builder) { + super(builder); + } + private SingleColumnValueFilter(boolean noInit) {} + + private static final SingleColumnValueFilter defaultInstance; + public static SingleColumnValueFilter getDefaultInstance() { + return defaultInstance; + } + + public SingleColumnValueFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes columnFamily = 1; + public static final int COLUMNFAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString columnFamily_; + public boolean hasColumnFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getColumnFamily() { + return columnFamily_; + } + + // optional bytes columnQualifier = 2; + public static final int COLUMNQUALIFIER_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString columnQualifier_; + public boolean hasColumnQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnQualifier() { + return columnQualifier_; + } + + // required .CompareType compareOp = 3; + public static final int COMPAREOP_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; + public boolean hasCompareOp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { + return compareOp_; + } + + // required .Comparator comparator = 4; + public static final int COMPARATOR_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { + return comparator_; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { + return comparator_; + } + + // optional bool foundColumn = 5; + public static final int FOUNDCOLUMN_FIELD_NUMBER = 5; + private boolean foundColumn_; + public boolean hasFoundColumn() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getFoundColumn() { + return foundColumn_; + } + + // optional bool matchedColumn = 6; + public static final int MATCHEDCOLUMN_FIELD_NUMBER = 6; + private boolean matchedColumn_; + public boolean hasMatchedColumn() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getMatchedColumn() { + return matchedColumn_; + } + + // optional bool filterIfMissing = 7; + public static final int FILTERIFMISSING_FIELD_NUMBER = 7; + private boolean filterIfMissing_; + public boolean hasFilterIfMissing() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public boolean getFilterIfMissing() { + return filterIfMissing_; + } + + // optional bool latestVersionOnly = 8; + public static final int LATESTVERSIONONLY_FIELD_NUMBER = 8; + private boolean latestVersionOnly_; + public boolean hasLatestVersionOnly() { + 
return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getLatestVersionOnly() { + return latestVersionOnly_; + } + + private void initFields() { + columnFamily_ = com.google.protobuf.ByteString.EMPTY; + columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + foundColumn_ = false; + matchedColumn_ = false; + filterIfMissing_ = false; + latestVersionOnly_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareOp()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasComparator()) { + memoizedIsInitialized = 0; + return false; + } + if (!getComparator().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, columnFamily_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, columnQualifier_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeEnum(3, compareOp_.getNumber()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(4, comparator_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, foundColumn_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(6, matchedColumn_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeBool(7, filterIfMissing_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeBool(8, latestVersionOnly_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, columnFamily_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, columnQualifier_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(3, compareOp_.getNumber()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, comparator_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, foundColumn_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(6, matchedColumn_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, filterIfMissing_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, latestVersionOnly_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) obj; + + boolean result = true; + result = result && (hasColumnFamily() == other.hasColumnFamily()); + if (hasColumnFamily()) { + result = result && getColumnFamily() + .equals(other.getColumnFamily()); + } + result = result && (hasColumnQualifier() == other.hasColumnQualifier()); + if (hasColumnQualifier()) { + result = result && getColumnQualifier() + .equals(other.getColumnQualifier()); + } + result = result && (hasCompareOp() == other.hasCompareOp()); + if (hasCompareOp()) { + result = result && + (getCompareOp() == other.getCompareOp()); + } + result = result && (hasComparator() == other.hasComparator()); + if (hasComparator()) { + result = result && getComparator() + .equals(other.getComparator()); + } + result = result && (hasFoundColumn() == other.hasFoundColumn()); + if (hasFoundColumn()) { + result = result && (getFoundColumn() + == other.getFoundColumn()); + } + result = result && (hasMatchedColumn() == other.hasMatchedColumn()); + if (hasMatchedColumn()) { + result = result && (getMatchedColumn() + == other.getMatchedColumn()); + } + result = result && (hasFilterIfMissing() == other.hasFilterIfMissing()); + if (hasFilterIfMissing()) { + result = result && (getFilterIfMissing() + == other.getFilterIfMissing()); + } + result = result && (hasLatestVersionOnly() == other.hasLatestVersionOnly()); + if (hasLatestVersionOnly()) { + result = result && (getLatestVersionOnly() + == other.getLatestVersionOnly()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasColumnFamily()) { + hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamily().hashCode(); + } + if (hasColumnQualifier()) { + hash = (37 * hash) + COLUMNQUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getColumnQualifier().hashCode(); + } + if (hasCompareOp()) { + hash = (37 * hash) + COMPAREOP_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getCompareOp()); + } + if (hasComparator()) { + hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; + hash = (53 * hash) + getComparator().hashCode(); + } + if (hasFoundColumn()) { + hash = (37 * hash) + FOUNDCOLUMN_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getFoundColumn()); + } + if (hasMatchedColumn()) { + hash = (37 * hash) + MATCHEDCOLUMN_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMatchedColumn()); + } + if (hasFilterIfMissing()) { + hash = (37 * hash) + FILTERIFMISSING_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getFilterIfMissing()); + } + if (hasLatestVersionOnly()) { + hash = (37 * hash) + LATESTVERSIONONLY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLatestVersionOnly()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder { + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparatorFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + columnFamily_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + bitField0_ = (bitField0_ & ~0x00000004); + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + foundColumn_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + matchedColumn_ = false; + bitField0_ = (bitField0_ & ~0x00000020); + filterIfMissing_ = false; + bitField0_ = (bitField0_ & ~0x00000040); + latestVersionOnly_ = false; + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.columnFamily_ = 
columnFamily_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.columnQualifier_ = columnQualifier_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.compareOp_ = compareOp_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + if (comparatorBuilder_ == null) { + result.comparator_ = comparator_; + } else { + result.comparator_ = comparatorBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.foundColumn_ = foundColumn_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.matchedColumn_ = matchedColumn_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000040; + } + result.filterIfMissing_ = filterIfMissing_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000080; + } + result.latestVersionOnly_ = latestVersionOnly_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) return this; + if (other.hasColumnFamily()) { + setColumnFamily(other.getColumnFamily()); + } + if (other.hasColumnQualifier()) { + setColumnQualifier(other.getColumnQualifier()); + } + if (other.hasCompareOp()) { + setCompareOp(other.getCompareOp()); + } + if (other.hasComparator()) { + mergeComparator(other.getComparator()); + } + if (other.hasFoundColumn()) { + setFoundColumn(other.getFoundColumn()); + } + if (other.hasMatchedColumn()) { + setMatchedColumn(other.getMatchedColumn()); + } + if (other.hasFilterIfMissing()) { + setFilterIfMissing(other.getFilterIfMissing()); + } + if (other.hasLatestVersionOnly()) { + setLatestVersionOnly(other.getLatestVersionOnly()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareOp()) { + + return false; + } + if (!hasComparator()) { + + return false; + } + if (!getComparator().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + columnFamily_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + columnQualifier_ = input.readBytes(); + break; + } + case 24: { + int rawValue = input.readEnum(); + 
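+              // valueOf() returns null for a wire value this client does not
+              // know; such values are kept as varint unknown fields rather
+              // than dropped.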
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + bitField0_ |= 0x00000004; + compareOp_ = value; + } + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); + if (hasComparator()) { + subBuilder.mergeFrom(getComparator()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparator(subBuilder.buildPartial()); + break; + } + case 40: { + bitField0_ |= 0x00000010; + foundColumn_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + matchedColumn_ = input.readBool(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + filterIfMissing_ = input.readBool(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + latestVersionOnly_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes columnFamily = 1; + private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasColumnFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getColumnFamily() { + return columnFamily_; + } + public Builder setColumnFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + columnFamily_ = value; + onChanged(); + return this; + } + public Builder clearColumnFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + columnFamily_ = getDefaultInstance().getColumnFamily(); + onChanged(); + return this; + } + + // optional bytes columnQualifier = 2; + private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasColumnQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnQualifier() { + return columnQualifier_; + } + public Builder setColumnQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + columnQualifier_ = value; + onChanged(); + return this; + } + public Builder clearColumnQualifier() { + bitField0_ = (bitField0_ & ~0x00000002); + columnQualifier_ = getDefaultInstance().getColumnQualifier(); + onChanged(); + return this; + } + + // required .CompareType compareOp = 3; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + public boolean hasCompareOp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { + return compareOp_; + } + public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + compareOp_ = value; + onChanged(); + return this; + } + public Builder clearCompareOp() { + bitField0_ = (bitField0_ & ~0x00000004); + compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; + onChanged(); + return this; + } + + // required .Comparator comparator = 4; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { + if (comparatorBuilder_ == null) { + return comparator_; + } else { + return comparatorBuilder_.getMessage(); + } + } + public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { + if (comparatorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparator_ = value; + onChanged(); + } else { + comparatorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder setComparator( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { + if (comparatorBuilder_ == null) { + comparator_ = builderForValue.build(); + onChanged(); + } else { + comparatorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { + if (comparatorBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { + comparator_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); + } else { + comparator_ = value; + } + onChanged(); + } else { + comparatorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder clearComparator() { + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + onChanged(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getComparatorFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { + if (comparatorBuilder_ != null) { + return comparatorBuilder_.getMessageOrBuilder(); + } else { + return comparator_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> + getComparatorFieldBuilder() { + if (comparatorBuilder_ == null) { + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( + comparator_, + getParentForChildren(), + isClean()); + comparator_ = null; + } + return comparatorBuilder_; + } + + // 
optional bool foundColumn = 5; + private boolean foundColumn_ ; + public boolean hasFoundColumn() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getFoundColumn() { + return foundColumn_; + } + public Builder setFoundColumn(boolean value) { + bitField0_ |= 0x00000010; + foundColumn_ = value; + onChanged(); + return this; + } + public Builder clearFoundColumn() { + bitField0_ = (bitField0_ & ~0x00000010); + foundColumn_ = false; + onChanged(); + return this; + } + + // optional bool matchedColumn = 6; + private boolean matchedColumn_ ; + public boolean hasMatchedColumn() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getMatchedColumn() { + return matchedColumn_; + } + public Builder setMatchedColumn(boolean value) { + bitField0_ |= 0x00000020; + matchedColumn_ = value; + onChanged(); + return this; + } + public Builder clearMatchedColumn() { + bitField0_ = (bitField0_ & ~0x00000020); + matchedColumn_ = false; + onChanged(); + return this; + } + + // optional bool filterIfMissing = 7; + private boolean filterIfMissing_ ; + public boolean hasFilterIfMissing() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public boolean getFilterIfMissing() { + return filterIfMissing_; + } + public Builder setFilterIfMissing(boolean value) { + bitField0_ |= 0x00000040; + filterIfMissing_ = value; + onChanged(); + return this; + } + public Builder clearFilterIfMissing() { + bitField0_ = (bitField0_ & ~0x00000040); + filterIfMissing_ = false; + onChanged(); + return this; + } + + // optional bool latestVersionOnly = 8; + private boolean latestVersionOnly_ ; + public boolean hasLatestVersionOnly() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getLatestVersionOnly() { + return latestVersionOnly_; + } + public Builder setLatestVersionOnly(boolean value) { + bitField0_ |= 0x00000080; + latestVersionOnly_ = value; + onChanged(); + return this; + } + public Builder clearLatestVersionOnly() { + bitField0_ = (bitField0_ & ~0x00000080); + latestVersionOnly_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SingleColumnValueFilter) + } + + static { + defaultInstance = new SingleColumnValueFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SingleColumnValueFilter) + } + + public interface SkipFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .Filter filter = 1; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); + } + public static final class SkipFilter extends + com.google.protobuf.GeneratedMessage + implements SkipFilterOrBuilder { + // Use SkipFilter.newBuilder() to construct. 
+ private SkipFilter(Builder builder) { + super(builder); + } + private SkipFilter(boolean noInit) {} + + private static final SkipFilter defaultInstance; + public static SkipFilter getDefaultInstance() { + return defaultInstance; + } + + public SkipFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .Filter filter = 1; + public static final int FILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + return filter_; + } + + private void initFields() { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, filter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, filter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) obj; + + boolean result = true; + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + 
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance()) return this; + if 
(other.hasFilter()) { + mergeFilter(other.getFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFilter()) { + + return false; + } + if (!getFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .Filter filter = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; 
+ } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:SkipFilter) + } + + static { + defaultInstance = new SkipFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SkipFilter) + } + + public interface TimestampsFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated int64 timestamps = 1; + java.util.List getTimestampsList(); + int getTimestampsCount(); + long getTimestamps(int index); + } + public static final class TimestampsFilter extends + com.google.protobuf.GeneratedMessage + implements TimestampsFilterOrBuilder { + // Use TimestampsFilter.newBuilder() to construct. + private TimestampsFilter(Builder builder) { + super(builder); + } + private TimestampsFilter(boolean noInit) {} + + private static final TimestampsFilter defaultInstance; + public static TimestampsFilter getDefaultInstance() { + return defaultInstance; + } + + public TimestampsFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable; + } + + // repeated int64 timestamps = 1; + public static final int TIMESTAMPS_FIELD_NUMBER = 1; + private java.util.List timestamps_; + public java.util.List + getTimestampsList() { + return timestamps_; + } + public int getTimestampsCount() { + return timestamps_.size(); + } + public long getTimestamps(int index) { + return timestamps_.get(index); + } + + private void initFields() { + timestamps_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < timestamps_.size(); i++) { + output.writeInt64(1, timestamps_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = 
memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < timestamps_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt64SizeNoTag(timestamps_.get(i)); + } + size += dataSize; + size += 1 * getTimestampsList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) obj; + + boolean result = true; + result = result && getTimestampsList() + .equals(other.getTimestampsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getTimestampsCount() > 0) { + hash = (37 * hash) + TIMESTAMPS_FIELD_NUMBER; + hash = (53 * hash) + getTimestampsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public 
static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + timestamps_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + timestamps_ = java.util.Collections.unmodifiableList(timestamps_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.timestamps_ = timestamps_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance()) return this; + if (!other.timestamps_.isEmpty()) { + if (timestamps_.isEmpty()) { + timestamps_ = other.timestamps_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTimestampsIsMutable(); + timestamps_.addAll(other.timestamps_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + ensureTimestampsIsMutable(); + timestamps_.add(input.readInt64()); + break; + } + case 10: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + while (input.getBytesUntilLimit() > 0) { + addTimestamps(input.readInt64()); + } + input.popLimit(limit); + break; + } + } + } + } + + private int bitField0_; + + // repeated int64 timestamps = 1; + private java.util.List timestamps_ = java.util.Collections.emptyList();; + private void ensureTimestampsIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + timestamps_ = new java.util.ArrayList(timestamps_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getTimestampsList() { + return java.util.Collections.unmodifiableList(timestamps_); + } + public int getTimestampsCount() { + return timestamps_.size(); + } + public long getTimestamps(int index) { + return timestamps_.get(index); + } + public Builder setTimestamps( + int index, long value) { + ensureTimestampsIsMutable(); + timestamps_.set(index, 
value); + onChanged(); + return this; + } + public Builder addTimestamps(long value) { + ensureTimestampsIsMutable(); + timestamps_.add(value); + onChanged(); + return this; + } + public Builder addAllTimestamps( + java.lang.Iterable values) { + ensureTimestampsIsMutable(); + super.addAll(values, timestamps_); + onChanged(); + return this; + } + public Builder clearTimestamps() { + timestamps_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TimestampsFilter) + } + + static { + defaultInstance = new TimestampsFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TimestampsFilter) + } + + public interface ValueFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .CompareFilter compareFilter = 1; + boolean hasCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); + } + public static final class ValueFilter extends + com.google.protobuf.GeneratedMessage + implements ValueFilterOrBuilder { + // Use ValueFilter.newBuilder() to construct. + private ValueFilter(Builder builder) { + super(builder); + } + private ValueFilter(boolean noInit) {} + + private static final ValueFilter defaultInstance; + public static ValueFilter getDefaultInstance() { + return defaultInstance; + } + + public ValueFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .CompareFilter compareFilter = 1; + public static final int COMPAREFILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + return compareFilter_; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + return compareFilter_; + } + + private void initFields() { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCompareFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCompareFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, compareFilter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = 
memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, compareFilter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) obj; + + boolean result = true; + result = result && (hasCompareFilter() == other.hasCompareFilter()); + if (hasCompareFilter()) { + result = result && getCompareFilter() + .equals(other.getCompareFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCompareFilter()) { + hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; + hash = (53 * hash) + getCompareFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCompareFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (compareFilterBuilder_ == null) { + result.compareFilter_ = compareFilter_; + } else { + result.compareFilter_ = compareFilterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance()) return this; + if (other.hasCompareFilter()) { + mergeCompareFilter(other.getCompareFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCompareFilter()) { + + return false; + } + if (!getCompareFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); + if (hasCompareFilter()) { + subBuilder.mergeFrom(getCompareFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setCompareFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .CompareFilter compareFilter = 1; + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; + public boolean hasCompareFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { + if (compareFilterBuilder_ == null) { + return compareFilter_; + } else { + return compareFilterBuilder_.getMessage(); + } + } + public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + compareFilter_ = value; + onChanged(); + } else { + compareFilterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setCompareFilter( + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { + if (compareFilterBuilder_ == null) { + compareFilter_ = builderForValue.build(); + onChanged(); + } else { + compareFilterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { + if (compareFilterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { + compareFilter_ = + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); + } else { + compareFilter_ = value; + } + onChanged(); + } else { + compareFilterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearCompareFilter() { + if (compareFilterBuilder_ == null) { + compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + onChanged(); + } else { + compareFilterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCompareFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { + if (compareFilterBuilder_ != null) { + return compareFilterBuilder_.getMessageOrBuilder(); + } else { + return compareFilter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> + getCompareFilterFieldBuilder() { + if (compareFilterBuilder_ == null) { + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( + compareFilter_, + getParentForChildren(), + isClean()); + compareFilter_ = null; + } + return compareFilterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ValueFilter) + } + + static { + defaultInstance = new ValueFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ValueFilter) + } 
+ + public interface WhileMatchFilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .Filter filter = 1; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); + } + public static final class WhileMatchFilter extends + com.google.protobuf.GeneratedMessage + implements WhileMatchFilterOrBuilder { + // Use WhileMatchFilter.newBuilder() to construct. + private WhileMatchFilter(Builder builder) { + super(builder); + } + private WhileMatchFilter(boolean noInit) {} + + private static final WhileMatchFilter defaultInstance; + public static WhileMatchFilter getDefaultInstance() { + return defaultInstance; + } + + public WhileMatchFilter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable; + } + + private int bitField0_; + // required .Filter filter = 1; + public static final int FILTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + return filter_; + } + + private void initFields() { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFilter()) { + memoizedIsInitialized = 0; + return false; + } + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, filter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, filter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other = 
(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) obj; + + boolean result = true; + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, 
extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter build() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + 
to_bitField0_ |= 0x00000001; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance()) return this; + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFilter()) { + + return false; + } + if (!getFilter().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .Filter filter = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder 
mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WhileMatchFilter) + } + + static { + defaultInstance = new WhileMatchFilter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WhileMatchFilter) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ColumnCountGetFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ColumnCountGetFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ColumnPaginationFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ColumnPaginationFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ColumnPrefixFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ColumnPrefixFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ColumnRangeFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ColumnRangeFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompareFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompareFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DependentColumnFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_DependentColumnFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FamilyFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FamilyFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FilterList_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FilterList_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FilterWrapper_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FilterWrapper_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FirstKeyOnlyFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FirstKeyOnlyFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FuzzyRowFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FuzzyRowFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_InclusiveStopFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_InclusiveStopFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_KeyOnlyFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_KeyOnlyFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultipleColumnPrefixFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PageFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PageFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PrefixFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PrefixFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_QualifierFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_QualifierFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RandomRowFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RandomRowFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RowFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RowFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SingleColumnValueExcludeFilter_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SingleColumnValueFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SingleColumnValueFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SkipFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SkipFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TimestampsFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TimestampsFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ValueFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ValueFilter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WhileMatchFilter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WhileMatchFilter_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014Filter.proto\032\013hbase.proto\032\020Comparator." + + "proto\"%\n\024ColumnCountGetFilter\022\r\n\005limit\030\001" + + " \002(\005\"7\n\026ColumnPaginationFilter\022\r\n\005limit\030" + + "\001 \002(\005\022\016\n\006offset\030\002 \001(\005\"$\n\022ColumnPrefixFil" + + "ter\022\016\n\006prefix\030\001 \002(\014\"q\n\021ColumnRangeFilter" + + "\022\021\n\tminColumn\030\001 \001(\014\022\032\n\022minColumnInclusiv" + + "e\030\002 \001(\010\022\021\n\tmaxColumn\030\003 \001(\014\022\032\n\022maxColumnI" + + "nclusive\030\004 \001(\010\"Q\n\rCompareFilter\022\037\n\tcompa" + + "reOp\030\001 \002(\0162\014.CompareType\022\037\n\ncomparator\030\002" + + " \001(\0132\013.Comparator\"\212\001\n\025DependentColumnFil", + "ter\022%\n\rcompareFilter\030\001 \002(\0132\016.CompareFilt" + + "er\022\024\n\014columnFamily\030\002 \001(\014\022\027\n\017columnQualif" + + "ier\030\003 \001(\014\022\033\n\023dropDependentColumn\030\004 \001(\010\"5" + + "\n\014FamilyFilter\022%\n\rcompareFilter\030\001 \002(\0132\016." 
+ + "CompareFilter\"\200\001\n\nFilterList\022&\n\010operator" + + "\030\001 \002(\0162\024.FilterList.Operator\022\030\n\007filters\030" + + "\002 \003(\0132\007.Filter\"0\n\010Operator\022\021\n\rMUST_PASS_" + + "ALL\020\001\022\021\n\rMUST_PASS_ONE\020\002\"(\n\rFilterWrappe" + + "r\022\027\n\006filter\030\001 \002(\0132\007.Filter\"\024\n\022FirstKeyOn" + + "lyFilter\";\n%FirstKeyValueMatchingQualifi", + "ersFilter\022\022\n\nqualifiers\030\001 \003(\014\"8\n\016FuzzyRo" + + "wFilter\022&\n\rfuzzyKeysData\030\001 \003(\0132\017.BytesBy" + + "tesPair\")\n\023InclusiveStopFilter\022\022\n\nstopRo" + + "wKey\030\001 \001(\014\"!\n\rKeyOnlyFilter\022\020\n\010lenAsVal\030" + + "\001 \002(\010\"4\n\032MultipleColumnPrefixFilter\022\026\n\016s" + + "ortedPrefixes\030\001 \003(\014\"\036\n\nPageFilter\022\020\n\010pag" + + "eSize\030\001 \002(\003\"\036\n\014PrefixFilter\022\016\n\006prefix\030\001 " + + "\001(\014\"8\n\017QualifierFilter\022%\n\rcompareFilter\030" + + "\001 \002(\0132\016.CompareFilter\"!\n\017RandomRowFilter" + + "\022\016\n\006chance\030\001 \002(\002\"2\n\tRowFilter\022%\n\rcompare", + "Filter\030\001 \002(\0132\016.CompareFilter\"[\n\036SingleCo" + + "lumnValueExcludeFilter\0229\n\027singleColumnVa" + + "lueFilter\030\001 \002(\0132\030.SingleColumnValueFilte" + + "r\"\352\001\n\027SingleColumnValueFilter\022\024\n\014columnF" + + "amily\030\001 \001(\014\022\027\n\017columnQualifier\030\002 \001(\014\022\037\n\t" + + "compareOp\030\003 \002(\0162\014.CompareType\022\037\n\ncompara" + + "tor\030\004 \002(\0132\013.Comparator\022\023\n\013foundColumn\030\005 " + + "\001(\010\022\025\n\rmatchedColumn\030\006 \001(\010\022\027\n\017filterIfMi" + + "ssing\030\007 \001(\010\022\031\n\021latestVersionOnly\030\010 \001(\010\"%" + + "\n\nSkipFilter\022\027\n\006filter\030\001 \002(\0132\007.Filter\"&\n", + "\020TimestampsFilter\022\022\n\ntimestamps\030\001 \003(\003\"4\n" + + "\013ValueFilter\022%\n\rcompareFilter\030\001 \002(\0132\016.Co" + + "mpareFilter\"+\n\020WhileMatchFilter\022\027\n\006filte" + + "r\030\001 \002(\0132\007.FilterBB\n*org.apache.hadoop.hb" + + "ase.protobuf.generatedB\014FilterProtosH\001\210\001" + + "\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_ColumnCountGetFilter_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_ColumnCountGetFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ColumnCountGetFilter_descriptor, + new java.lang.String[] { "Limit", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); + internal_static_ColumnPaginationFilter_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_ColumnPaginationFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ColumnPaginationFilter_descriptor, + new java.lang.String[] { "Limit", "Offset", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class); + 
internal_static_ColumnPrefixFilter_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_ColumnPrefixFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ColumnPrefixFilter_descriptor, + new java.lang.String[] { "Prefix", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class); + internal_static_ColumnRangeFilter_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_ColumnRangeFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ColumnRangeFilter_descriptor, + new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); + internal_static_CompareFilter_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_CompareFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompareFilter_descriptor, + new java.lang.String[] { "CompareOp", "Comparator", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); + internal_static_DependentColumnFilter_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_DependentColumnFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DependentColumnFilter_descriptor, + new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); + internal_static_FamilyFilter_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_FamilyFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FamilyFilter_descriptor, + new java.lang.String[] { "CompareFilter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); + internal_static_FilterList_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_FilterList_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FilterList_descriptor, + new java.lang.String[] { "Operator", "Filters", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); + internal_static_FilterWrapper_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_FilterWrapper_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FilterWrapper_descriptor, + new java.lang.String[] { "Filter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); + internal_static_FirstKeyOnlyFilter_descriptor = + getDescriptor().getMessageTypes().get(9); + 
internal_static_FirstKeyOnlyFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FirstKeyOnlyFilter_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); + internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor, + new java.lang.String[] { "Qualifiers", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); + internal_static_FuzzyRowFilter_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_FuzzyRowFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FuzzyRowFilter_descriptor, + new java.lang.String[] { "FuzzyKeysData", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); + internal_static_InclusiveStopFilter_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_InclusiveStopFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_InclusiveStopFilter_descriptor, + new java.lang.String[] { "StopRowKey", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); + internal_static_KeyOnlyFilter_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_KeyOnlyFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_KeyOnlyFilter_descriptor, + new java.lang.String[] { "LenAsVal", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); + internal_static_MultipleColumnPrefixFilter_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_MultipleColumnPrefixFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultipleColumnPrefixFilter_descriptor, + new java.lang.String[] { "SortedPrefixes", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class); + internal_static_PageFilter_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_PageFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PageFilter_descriptor, + new java.lang.String[] { "PageSize", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class); + internal_static_PrefixFilter_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_PrefixFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_PrefixFilter_descriptor, + new java.lang.String[] { "Prefix", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); + internal_static_QualifierFilter_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_QualifierFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_QualifierFilter_descriptor, + new java.lang.String[] { "CompareFilter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); + internal_static_RandomRowFilter_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_RandomRowFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RandomRowFilter_descriptor, + new java.lang.String[] { "Chance", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class); + internal_static_RowFilter_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_RowFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RowFilter_descriptor, + new java.lang.String[] { "CompareFilter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class); + internal_static_SingleColumnValueExcludeFilter_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SingleColumnValueExcludeFilter_descriptor, + new java.lang.String[] { "SingleColumnValueFilter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); + internal_static_SingleColumnValueFilter_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_SingleColumnValueFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SingleColumnValueFilter_descriptor, + new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FoundColumn", "MatchedColumn", "FilterIfMissing", "LatestVersionOnly", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); + internal_static_SkipFilter_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_SkipFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SkipFilter_descriptor, + new java.lang.String[] { "Filter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); + internal_static_TimestampsFilter_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_TimestampsFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TimestampsFilter_descriptor, + new java.lang.String[] { 
"Timestamps", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class); + internal_static_ValueFilter_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_ValueFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ValueFilter_descriptor, + new java.lang.String[] { "CompareFilter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class); + internal_static_WhileMatchFilter_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_WhileMatchFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WhileMatchFilter_descriptor, + new java.lang.String[] { "Filter", }, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, + org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java new file mode 100644 index 0000000..6d37719 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -0,0 +1,11407 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: hbase.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class HBaseProtos { + private HBaseProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public enum CompareType + implements com.google.protobuf.ProtocolMessageEnum { + LESS(0, 0), + LESS_OR_EQUAL(1, 1), + EQUAL(2, 2), + NOT_EQUAL(3, 3), + GREATER_OR_EQUAL(4, 4), + GREATER(5, 5), + NO_OP(6, 6), + ; + + public static final int LESS_VALUE = 0; + public static final int LESS_OR_EQUAL_VALUE = 1; + public static final int EQUAL_VALUE = 2; + public static final int NOT_EQUAL_VALUE = 3; + public static final int GREATER_OR_EQUAL_VALUE = 4; + public static final int GREATER_VALUE = 5; + public static final int NO_OP_VALUE = 6; + + + public final int getNumber() { return value; } + + public static CompareType valueOf(int value) { + switch (value) { + case 0: return LESS; + case 1: return LESS_OR_EQUAL; + case 2: return EQUAL; + case 3: return NOT_EQUAL; + case 4: return GREATER_OR_EQUAL; + case 5: return GREATER; + case 6: return NO_OP; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public CompareType findValueByNumber(int number) { + return CompareType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final CompareType[] VALUES = { + LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, + }; + + public static CompareType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private CompareType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:CompareType) + } + + public enum KeyType + implements com.google.protobuf.ProtocolMessageEnum { + MINIMUM(0, 0), + PUT(1, 4), + DELETE(2, 8), + DELETE_COLUMN(3, 12), + DELETE_FAMILY(4, 14), + MAXIMUM(5, 255), + ; + + public static final int MINIMUM_VALUE = 0; + public static final int PUT_VALUE = 4; + public static final int DELETE_VALUE = 8; + public static final int DELETE_COLUMN_VALUE = 12; + public static final int DELETE_FAMILY_VALUE = 14; + public static final int MAXIMUM_VALUE = 255; + + + public final int getNumber() { return value; } + + public static KeyType valueOf(int value) { + switch (value) { + case 0: return MINIMUM; + case 4: return PUT; + case 8: return DELETE; + case 12: return DELETE_COLUMN; + case 14: return DELETE_FAMILY; + case 255: return MAXIMUM; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = 
+ new com.google.protobuf.Internal.EnumLiteMap() { + public KeyType findValueByNumber(int number) { + return KeyType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1); + } + + private static final KeyType[] VALUES = { + MINIMUM, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, MAXIMUM, + }; + + public static KeyType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private KeyType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:KeyType) + } + + public interface TableSchemaOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes name = 1; + boolean hasName(); + com.google.protobuf.ByteString getName(); + + // repeated .TableSchema.Attribute attributes = 2; + java.util.List + getAttributesList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getAttributes(int index); + int getAttributesCount(); + java.util.List + getAttributesOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder getAttributesOrBuilder( + int index); + + // repeated .ColumnFamilySchema columnFamilies = 3; + java.util.List + getColumnFamiliesList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index); + int getColumnFamiliesCount(); + java.util.List + getColumnFamiliesOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( + int index); + } + public static final class TableSchema extends + com.google.protobuf.GeneratedMessage + implements TableSchemaOrBuilder { + // Use TableSchema.newBuilder() to construct. 
+ private TableSchema(Builder builder) { + super(builder); + } + private TableSchema(boolean noInit) {} + + private static final TableSchema defaultInstance; + public static TableSchema getDefaultInstance() { + return defaultInstance; + } + + public TableSchema getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable; + } + + public interface AttributeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes name = 1; + boolean hasName(); + com.google.protobuf.ByteString getName(); + + // required bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class Attribute extends + com.google.protobuf.GeneratedMessage + implements AttributeOrBuilder { + // Use Attribute.newBuilder() to construct. + private Attribute(Builder builder) { + super(builder); + } + private Attribute(boolean noInit) {} + + private static final Attribute defaultInstance; + public static Attribute getDefaultInstance() { + return defaultInstance; + } + + public Attribute getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_fieldAccessorTable; + } + + private int bitField0_; + // required bytes name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + + // required bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + name_ = com.google.protobuf.ByteString.EMPTY; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, name_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; 
+ if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, name_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes name = 1; + private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + public Builder 
setName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + + // required bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TableSchema.Attribute) + } + + static { + defaultInstance = new Attribute(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TableSchema.Attribute) + } + + private int bitField0_; + // optional bytes name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + + // repeated .TableSchema.Attribute attributes = 2; + public static final int ATTRIBUTES_FIELD_NUMBER = 2; + private java.util.List attributes_; + public java.util.List getAttributesList() { + return attributes_; + } + public java.util.List + getAttributesOrBuilderList() { + return attributes_; + } + public int getAttributesCount() { + return attributes_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getAttributes(int index) { + return attributes_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder getAttributesOrBuilder( + int index) { + return attributes_.get(index); + } + + // repeated .ColumnFamilySchema columnFamilies = 3; + public static final int COLUMNFAMILIES_FIELD_NUMBER = 3; + private java.util.List columnFamilies_; + public java.util.List getColumnFamiliesList() { + return columnFamilies_; + } + public java.util.List + getColumnFamiliesOrBuilderList() { + return columnFamilies_; + } + public int getColumnFamiliesCount() { + return columnFamilies_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { + return columnFamilies_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( + int index) { + return columnFamilies_.get(index); + } + + private void initFields() { + name_ = com.google.protobuf.ByteString.EMPTY; + attributes_ = java.util.Collections.emptyList(); + columnFamilies_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getAttributesCount(); i++) { + if (!getAttributes(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getColumnFamiliesCount(); i++) { + if 
(!getColumnFamilies(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, name_); + } + for (int i = 0; i < attributes_.size(); i++) { + output.writeMessage(2, attributes_.get(i)); + } + for (int i = 0; i < columnFamilies_.size(); i++) { + output.writeMessage(3, columnFamilies_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, name_); + } + for (int i = 0; i < attributes_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, attributes_.get(i)); + } + for (int i = 0; i < columnFamilies_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, columnFamilies_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && getAttributesList() + .equals(other.getAttributesList()); + result = result && getColumnFamiliesList() + .equals(other.getColumnFamiliesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (getAttributesCount() > 0) { + hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; + hash = (53 * hash) + getAttributesList().hashCode(); + } + if (getColumnFamiliesCount() > 0) { + hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamiliesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder() + private Builder() { + 
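+ // Descriptive note: when GeneratedMessage.alwaysUseFieldBuilders is set (a + // protobuf hook intended for tests), the call below eagerly creates the + // nested RepeatedFieldBuilders for attributes and columnFamilies; otherwise + // they are created lazily on first access.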
maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getAttributesFieldBuilder(); + getColumnFamiliesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (attributesBuilder_ == null) { + attributes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + attributesBuilder_.clear(); + } + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + columnFamiliesBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (attributesBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = java.util.Collections.unmodifiableList(attributes_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.attributes_ = attributes_; + } else { + result.attributes_ = attributesBuilder_.build(); + } + if (columnFamiliesBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.columnFamilies_ = columnFamilies_; + } else { + result.columnFamilies_ = columnFamiliesBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (attributesBuilder_ == null) { + if (!other.attributes_.isEmpty()) { + if (attributes_.isEmpty()) { + attributes_ = other.attributes_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAttributesIsMutable(); + attributes_.addAll(other.attributes_); + } + onChanged(); + } + } else { + if (!other.attributes_.isEmpty()) { + if (attributesBuilder_.isEmpty()) { + attributesBuilder_.dispose(); + attributesBuilder_ = null; + attributes_ = other.attributes_; + bitField0_ = (bitField0_ & ~0x00000002); + attributesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributesFieldBuilder() : null; + } else { + attributesBuilder_.addAllMessages(other.attributes_); + } + } + } + if (columnFamiliesBuilder_ == null) { + if (!other.columnFamilies_.isEmpty()) { + if (columnFamilies_.isEmpty()) { + columnFamilies_ = other.columnFamilies_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureColumnFamiliesIsMutable(); + columnFamilies_.addAll(other.columnFamilies_); + } + onChanged(); + } + } else { + if (!other.columnFamilies_.isEmpty()) { + if (columnFamiliesBuilder_.isEmpty()) { + columnFamiliesBuilder_.dispose(); + columnFamiliesBuilder_ = null; + columnFamilies_ = other.columnFamilies_; + bitField0_ = (bitField0_ & ~0x00000004); + columnFamiliesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnFamiliesFieldBuilder() : null; + } else { + columnFamiliesBuilder_.addAllMessages(other.columnFamilies_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getAttributesCount(); i++) { + if (!getAttributes(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getColumnFamiliesCount(); i++) { + if (!getColumnFamilies(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttributes(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumnFamilies(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // 
optional bytes name = 1; + private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + public Builder setName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + + // repeated .TableSchema.Attribute attributes = 2; + private java.util.List attributes_ = + java.util.Collections.emptyList(); + private void ensureAttributesIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = new java.util.ArrayList(attributes_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder> attributesBuilder_; + + public java.util.List getAttributesList() { + if (attributesBuilder_ == null) { + return java.util.Collections.unmodifiableList(attributes_); + } else { + return attributesBuilder_.getMessageList(); + } + } + public int getAttributesCount() { + if (attributesBuilder_ == null) { + return attributes_.size(); + } else { + return attributesBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getAttributes(int index) { + if (attributesBuilder_ == null) { + return attributes_.get(index); + } else { + return attributesBuilder_.getMessage(index); + } + } + public Builder setAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute value) { + if (attributesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributesIsMutable(); + attributes_.set(index, value); + onChanged(); + } else { + attributesBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder builderForValue) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.set(index, builderForValue.build()); + onChanged(); + } else { + attributesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute value) { + if (attributesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributesIsMutable(); + attributes_.add(value); + onChanged(); + } else { + attributesBuilder_.addMessage(value); + } + return this; + } + public Builder addAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute value) { + if (attributesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributesIsMutable(); + attributes_.add(index, value); + onChanged(); + } else { + attributesBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttributes( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder builderForValue) { + 
if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.add(builderForValue.build()); + onChanged(); + } else { + attributesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder builderForValue) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.add(index, builderForValue.build()); + onChanged(); + } else { + attributesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttributes( + java.lang.Iterable values) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + super.addAll(values, attributes_); + onChanged(); + } else { + attributesBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttributes() { + if (attributesBuilder_ == null) { + attributes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + attributesBuilder_.clear(); + } + return this; + } + public Builder removeAttributes(int index) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.remove(index); + onChanged(); + } else { + attributesBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder getAttributesBuilder( + int index) { + return getAttributesFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder getAttributesOrBuilder( + int index) { + if (attributesBuilder_ == null) { + return attributes_.get(index); } else { + return attributesBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributesOrBuilderList() { + if (attributesBuilder_ != null) { + return attributesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attributes_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder addAttributesBuilder() { + return getAttributesFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder addAttributesBuilder( + int index) { + return getAttributesFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance()); + } + public java.util.List + getAttributesBuilderList() { + return getAttributesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder> + getAttributesFieldBuilder() { + if (attributesBuilder_ == null) { + attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder>( + attributes_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + attributes_ = null; + } + return 
attributesBuilder_; + } + + // repeated .ColumnFamilySchema columnFamilies = 3; + private java.util.List columnFamilies_ = + java.util.Collections.emptyList(); + private void ensureColumnFamiliesIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + columnFamilies_ = new java.util.ArrayList(columnFamilies_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + + public java.util.List getColumnFamiliesList() { + if (columnFamiliesBuilder_ == null) { + return java.util.Collections.unmodifiableList(columnFamilies_); + } else { + return columnFamiliesBuilder_.getMessageList(); + } + } + public int getColumnFamiliesCount() { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_.size(); + } else { + return columnFamiliesBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_.get(index); + } else { + return columnFamiliesBuilder_.getMessage(index); + } + } + public Builder setColumnFamilies( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnFamiliesIsMutable(); + columnFamilies_.set(index, value); + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumnFamilies( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + ensureColumnFamiliesIsMutable(); + columnFamilies_.set(index, builderForValue.build()); + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnFamiliesIsMutable(); + columnFamilies_.add(value); + onChanged(); + } else { + columnFamiliesBuilder_.addMessage(value); + } + return this; + } + public Builder addColumnFamilies( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnFamiliesIsMutable(); + columnFamilies_.add(index, value); + onChanged(); + } else { + columnFamiliesBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumnFamilies( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + ensureColumnFamiliesIsMutable(); + columnFamilies_.add(builderForValue.build()); + onChanged(); + } else { + columnFamiliesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumnFamilies( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + ensureColumnFamiliesIsMutable(); + 
columnFamilies_.add(index, builderForValue.build()); + onChanged(); + } else { + columnFamiliesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumnFamilies( + java.lang.Iterable values) { + if (columnFamiliesBuilder_ == null) { + ensureColumnFamiliesIsMutable(); + super.addAll(values, columnFamilies_); + onChanged(); + } else { + columnFamiliesBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + columnFamiliesBuilder_.clear(); + } + return this; + } + public Builder removeColumnFamilies(int index) { + if (columnFamiliesBuilder_ == null) { + ensureColumnFamiliesIsMutable(); + columnFamilies_.remove(index); + onChanged(); + } else { + columnFamiliesBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder( + int index) { + return getColumnFamiliesFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( + int index) { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_.get(index); } else { + return columnFamiliesBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnFamiliesOrBuilderList() { + if (columnFamiliesBuilder_ != null) { + return columnFamiliesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(columnFamilies_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() { + return getColumnFamiliesFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder( + int index) { + return getColumnFamiliesFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); + } + public java.util.List + getColumnFamiliesBuilderList() { + return getColumnFamiliesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> + getColumnFamiliesFieldBuilder() { + if (columnFamiliesBuilder_ == null) { + columnFamiliesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( + columnFamilies_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + columnFamilies_ = null; + } + return columnFamiliesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:TableSchema) + } + + static { + defaultInstance = new TableSchema(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TableSchema) + } + + public interface ColumnFamilySchemaOrBuilder + extends 
com.google.protobuf.MessageOrBuilder { + + // required bytes name = 1; + boolean hasName(); + com.google.protobuf.ByteString getName(); + + // repeated .ColumnFamilySchema.Attribute attributes = 2; + java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute> + getAttributesList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getAttributes(int index); + int getAttributesCount(); + java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder> + getAttributesOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder getAttributesOrBuilder( + int index); + } + public static final class ColumnFamilySchema extends + com.google.protobuf.GeneratedMessage + implements ColumnFamilySchemaOrBuilder { + // Use ColumnFamilySchema.newBuilder() to construct. + private ColumnFamilySchema(Builder builder) { + super(builder); + } + private ColumnFamilySchema(boolean noInit) {} + + private static final ColumnFamilySchema defaultInstance; + public static ColumnFamilySchema getDefaultInstance() { + return defaultInstance; + } + + public ColumnFamilySchema getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable; + } + + public interface AttributeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes name = 1; + boolean hasName(); + com.google.protobuf.ByteString getName(); + + // required bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class Attribute extends + com.google.protobuf.GeneratedMessage + implements AttributeOrBuilder { + // Use Attribute.newBuilder() to construct. 
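+ // Illustrative usage sketch, not part of the generated output (the attribute + // name and value below are placeholders): both fields are required, so + // build() throws an UninitializedMessageException unless name and value + // have been set. + // + //   ColumnFamilySchema.Attribute attr = ColumnFamilySchema.Attribute.newBuilder() + //       .setName(com.google.protobuf.ByteString.copyFromUtf8("BLOCKSIZE")) + //       .setValue(com.google.protobuf.ByteString.copyFromUtf8("65536")) + //       .build();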
+ private Attribute(Builder builder) { + super(builder); + } + private Attribute(boolean noInit) {} + + private static final Attribute defaultInstance; + public static Attribute getDefaultInstance() { + return defaultInstance; + } + + public Attribute getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable; + } + + private int bitField0_; + // required bytes name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + + // required bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + name_ = com.google.protobuf.ByteString.EMPTY; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, name_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, name_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + 
result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute buildPartial() { + 
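+ // buildPartial() copies the builder's fields into a fresh message without + // enforcing required fields; build() and buildParsed() layer the + // isInitialized() check on top of it.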
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes name = 1; + private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + public Builder setName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + + // required bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = 
getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ColumnFamilySchema.Attribute) + } + + static { + defaultInstance = new Attribute(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ColumnFamilySchema.Attribute) + } + + private int bitField0_; + // required bytes name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + + // repeated .ColumnFamilySchema.Attribute attributes = 2; + public static final int ATTRIBUTES_FIELD_NUMBER = 2; + private java.util.List attributes_; + public java.util.List getAttributesList() { + return attributes_; + } + public java.util.List + getAttributesOrBuilderList() { + return attributes_; + } + public int getAttributesCount() { + return attributes_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getAttributes(int index) { + return attributes_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder getAttributesOrBuilder( + int index) { + return attributes_.get(index); + } + + private void initFields() { + name_ = com.google.protobuf.ByteString.EMPTY; + attributes_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getAttributesCount(); i++) { + if (!getAttributes(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, name_); + } + for (int i = 0; i < attributes_.size(); i++) { + output.writeMessage(2, attributes_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, name_); + } + for (int i = 0; i < attributes_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, attributes_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && 
getName() + .equals(other.getName()); + } + result = result && getAttributesList() + .equals(other.getAttributesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (getAttributesCount() > 0) { + hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; + hash = (53 * hash) + getAttributesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getAttributesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (attributesBuilder_ == null) { + attributes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + attributesBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema(this); + int 
from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (attributesBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = java.util.Collections.unmodifiableList(attributes_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.attributes_ = attributes_; + } else { + result.attributes_ = attributesBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (attributesBuilder_ == null) { + if (!other.attributes_.isEmpty()) { + if (attributes_.isEmpty()) { + attributes_ = other.attributes_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAttributesIsMutable(); + attributes_.addAll(other.attributes_); + } + onChanged(); + } + } else { + if (!other.attributes_.isEmpty()) { + if (attributesBuilder_.isEmpty()) { + attributesBuilder_.dispose(); + attributesBuilder_ = null; + attributes_ = other.attributes_; + bitField0_ = (bitField0_ & ~0x00000002); + attributesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributesFieldBuilder() : null; + } else { + attributesBuilder_.addAllMessages(other.attributes_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + for (int i = 0; i < getAttributesCount(); i++) { + if (!getAttributes(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttributes(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes name = 1; + private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getName() { + return name_; + } + 
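+ // bitField0_ bookkeeping: bit 0x00000001 records whether name has been set, + // while bit 0x00000002 marks the attributes list as privately mutable (see + // ensureAttributesIsMutable()); clearName() clears the bit and restores the + // default value.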
public Builder setName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + + // repeated .ColumnFamilySchema.Attribute attributes = 2; + private java.util.List attributes_ = + java.util.Collections.emptyList(); + private void ensureAttributesIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + attributes_ = new java.util.ArrayList(attributes_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder> attributesBuilder_; + + public java.util.List getAttributesList() { + if (attributesBuilder_ == null) { + return java.util.Collections.unmodifiableList(attributes_); + } else { + return attributesBuilder_.getMessageList(); + } + } + public int getAttributesCount() { + if (attributesBuilder_ == null) { + return attributes_.size(); + } else { + return attributesBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getAttributes(int index) { + if (attributesBuilder_ == null) { + return attributes_.get(index); + } else { + return attributesBuilder_.getMessage(index); + } + } + public Builder setAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute value) { + if (attributesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributesIsMutable(); + attributes_.set(index, value); + onChanged(); + } else { + attributesBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder builderForValue) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.set(index, builderForValue.build()); + onChanged(); + } else { + attributesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute value) { + if (attributesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributesIsMutable(); + attributes_.add(value); + onChanged(); + } else { + attributesBuilder_.addMessage(value); + } + return this; + } + public Builder addAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute value) { + if (attributesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributesIsMutable(); + attributes_.add(index, value); + onChanged(); + } else { + attributesBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttributes( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder builderForValue) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.add(builderForValue.build()); + onChanged(); + } else { + attributesBuilder_.addMessage(builderForValue.build()); + } + 
return this; + } + public Builder addAttributes( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder builderForValue) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.add(index, builderForValue.build()); + onChanged(); + } else { + attributesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttributes( + java.lang.Iterable values) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + super.addAll(values, attributes_); + onChanged(); + } else { + attributesBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttributes() { + if (attributesBuilder_ == null) { + attributes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + attributesBuilder_.clear(); + } + return this; + } + public Builder removeAttributes(int index) { + if (attributesBuilder_ == null) { + ensureAttributesIsMutable(); + attributes_.remove(index); + onChanged(); + } else { + attributesBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder getAttributesBuilder( + int index) { + return getAttributesFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder getAttributesOrBuilder( + int index) { + if (attributesBuilder_ == null) { + return attributes_.get(index); } else { + return attributesBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributesOrBuilderList() { + if (attributesBuilder_ != null) { + return attributesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attributes_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder addAttributesBuilder() { + return getAttributesFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder addAttributesBuilder( + int index) { + return getAttributesFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance()); + } + public java.util.List + getAttributesBuilderList() { + return getAttributesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder> + getAttributesFieldBuilder() { + if (attributesBuilder_ == null) { + attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder>( + attributes_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + attributes_ = null; + } + return attributesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ColumnFamilySchema) + } + + static { + 
defaultInstance = new ColumnFamilySchema(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ColumnFamilySchema) + } + + public interface RegionInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 regionId = 1; + boolean hasRegionId(); + long getRegionId(); + + // required bytes tableName = 2; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // optional bytes startKey = 3; + boolean hasStartKey(); + com.google.protobuf.ByteString getStartKey(); + + // optional bytes endKey = 4; + boolean hasEndKey(); + com.google.protobuf.ByteString getEndKey(); + + // optional bool offline = 5; + boolean hasOffline(); + boolean getOffline(); + + // optional bool split = 6; + boolean hasSplit(); + boolean getSplit(); + } + public static final class RegionInfo extends + com.google.protobuf.GeneratedMessage + implements RegionInfoOrBuilder { + // Use RegionInfo.newBuilder() to construct. + private RegionInfo(Builder builder) { + super(builder); + } + private RegionInfo(boolean noInit) {} + + private static final RegionInfo defaultInstance; + public static RegionInfo getDefaultInstance() { + return defaultInstance; + } + + public RegionInfo getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 regionId = 1; + public static final int REGIONID_FIELD_NUMBER = 1; + private long regionId_; + public boolean hasRegionId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getRegionId() { + return regionId_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // optional bytes startKey = 3; + public static final int STARTKEY_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString startKey_; + public boolean hasStartKey() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartKey() { + return startKey_; + } + + // optional bytes endKey = 4; + public static final int ENDKEY_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString endKey_; + public boolean hasEndKey() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getEndKey() { + return endKey_; + } + + // optional bool offline = 5; + public static final int OFFLINE_FIELD_NUMBER = 5; + private boolean offline_; + public boolean hasOffline() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getOffline() { + return offline_; + } + + // optional bool split = 6; + public static final int SPLIT_FIELD_NUMBER = 6; + private boolean split_; + public boolean hasSplit() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getSplit() { + return split_; + } + + private void initFields() { + regionId_ = 0L; + tableName_ = com.google.protobuf.ByteString.EMPTY; + startKey_ = 
com.google.protobuf.ByteString.EMPTY; + endKey_ = com.google.protobuf.ByteString.EMPTY; + offline_ = false; + split_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionId()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, regionId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, startKey_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, endKey_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, offline_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(6, split_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, regionId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, startKey_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, endKey_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, offline_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(6, split_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) obj; + + boolean result = true; + result = result && (hasRegionId() == other.hasRegionId()); + if (hasRegionId()) { + result = result && (getRegionId() + == other.getRegionId()); + } + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasStartKey() == other.hasStartKey()); + if (hasStartKey()) { + result = result && getStartKey() + .equals(other.getStartKey()); + } + result = result && (hasEndKey() == other.hasEndKey()); + if (hasEndKey()) { + result = result && getEndKey() + .equals(other.getEndKey()); + } + result = result && 
(hasOffline() == other.hasOffline()); + if (hasOffline()) { + result = result && (getOffline() + == other.getOffline()); + } + result = result && (hasSplit() == other.hasSplit()); + if (hasSplit()) { + result = result && (getSplit() + == other.getSplit()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionId()) { + hash = (37 * hash) + REGIONID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getRegionId()); + } + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasStartKey()) { + hash = (37 * hash) + STARTKEY_FIELD_NUMBER; + hash = (53 * hash) + getStartKey().hashCode(); + } + if (hasEndKey()) { + hash = (37 * hash) + ENDKEY_FIELD_NUMBER; + hash = (53 * hash) + getEndKey().hashCode(); + } + if (hasOffline()) { + hash = (37 * hash) + OFFLINE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getOffline()); + } + if (hasSplit()) { + hash = (37 * hash) + SPLIT_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getSplit()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return 
builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + startKey_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + endKey_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + offline_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + split_ = false; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo 
buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.regionId_ = regionId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.startKey_ = startKey_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.endKey_ = endKey_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.offline_ = offline_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.split_ = split_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; + if (other.hasRegionId()) { + setRegionId(other.getRegionId()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasStartKey()) { + setStartKey(other.getStartKey()); + } + if (other.hasEndKey()) { + setEndKey(other.getEndKey()); + } + if (other.hasOffline()) { + setOffline(other.getOffline()); + } + if (other.hasSplit()) { + setSplit(other.getSplit()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionId()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + regionId_ = input.readUInt64(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + startKey_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + endKey_ = 
input.readBytes(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + offline_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + split_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 regionId = 1; + private long regionId_ ; + public boolean hasRegionId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getRegionId() { + return regionId_; + } + public Builder setRegionId(long value) { + bitField0_ |= 0x00000001; + regionId_ = value; + onChanged(); + return this; + } + public Builder clearRegionId() { + bitField0_ = (bitField0_ & ~0x00000001); + regionId_ = 0L; + onChanged(); + return this; + } + + // required bytes tableName = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // optional bytes startKey = 3; + private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStartKey() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartKey() { + return startKey_; + } + public Builder setStartKey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + startKey_ = value; + onChanged(); + return this; + } + public Builder clearStartKey() { + bitField0_ = (bitField0_ & ~0x00000004); + startKey_ = getDefaultInstance().getStartKey(); + onChanged(); + return this; + } + + // optional bytes endKey = 4; + private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEndKey() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getEndKey() { + return endKey_; + } + public Builder setEndKey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + endKey_ = value; + onChanged(); + return this; + } + public Builder clearEndKey() { + bitField0_ = (bitField0_ & ~0x00000008); + endKey_ = getDefaultInstance().getEndKey(); + onChanged(); + return this; + } + + // optional bool offline = 5; + private boolean offline_ ; + public boolean hasOffline() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getOffline() { + return offline_; + } + public Builder setOffline(boolean value) { + bitField0_ |= 0x00000010; + offline_ = value; + onChanged(); + return this; + } + public Builder clearOffline() { + bitField0_ = (bitField0_ & ~0x00000010); + offline_ = false; + onChanged(); + return this; + } + + // optional bool split = 6; + private boolean split_ ; + public boolean hasSplit() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getSplit() { + return split_; + } + public Builder setSplit(boolean value) { + bitField0_ |= 0x00000020; + split_ = value; + onChanged(); + return this; + } + public Builder clearSplit() { + bitField0_ = (bitField0_ 
& ~0x00000020); + split_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionInfo) + } + + static { + defaultInstance = new RegionInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionInfo) + } + + public interface RegionSpecifierOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier.RegionSpecifierType type = 1; + boolean hasType(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); + + // required bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class RegionSpecifier extends + com.google.protobuf.GeneratedMessage + implements RegionSpecifierOrBuilder { + // Use RegionSpecifier.newBuilder() to construct. + private RegionSpecifier(Builder builder) { + super(builder); + } + private RegionSpecifier(boolean noInit) {} + + private static final RegionSpecifier defaultInstance; + public static RegionSpecifier getDefaultInstance() { + return defaultInstance; + } + + public RegionSpecifier getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; + } + + public enum RegionSpecifierType + implements com.google.protobuf.ProtocolMessageEnum { + REGION_NAME(0, 1), + ENCODED_REGION_NAME(1, 2), + ; + + public static final int REGION_NAME_VALUE = 1; + public static final int ENCODED_REGION_NAME_VALUE = 2; + + + public final int getNumber() { return value; } + + public static RegionSpecifierType valueOf(int value) { + switch (value) { + case 1: return REGION_NAME; + case 2: return ENCODED_REGION_NAME; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public RegionSpecifierType findValueByNumber(int number) { + return RegionSpecifierType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); + } + + private static final RegionSpecifierType[] VALUES = { + REGION_NAME, ENCODED_REGION_NAME, + }; + + public static RegionSpecifierType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private RegionSpecifierType(int index, int value) { + this.index = index; + this.value = value; + } + + // 
@@protoc_insertion_point(enum_scope:RegionSpecifier.RegionSpecifierType) + } + + private int bitField0_; + // required .RegionSpecifier.RegionSpecifierType type = 1; + public static final int TYPE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; + public boolean hasType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { + return type_; + } + + // required bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, type_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, type_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) obj; + + boolean result = true; + result = result && (hasType() == other.hasType()); + if (hasType()) { + result = result && + (getType() == other.getType()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getType()); + } + if (hasValue()) { + 
hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { 
+ Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.type_ = type_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)other); + } else { + super.mergeFrom(other); 
+ return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; + if (other.hasType()) { + setType(other.getType()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasType()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + type_ = value; + } + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier.RegionSpecifierType type = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + public boolean hasType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { + return type_; + } + public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + type_ = value; + onChanged(); + return this; + } + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000001); + type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + onChanged(); + return this; + } + + // required bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionSpecifier) + } + + static { + defaultInstance = new RegionSpecifier(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:RegionSpecifier) + } + + public interface RegionLoadOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier regionSpecifier = 1; + boolean hasRegionSpecifier(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder(); + + // optional uint32 stores = 2; + boolean hasStores(); + int getStores(); + + // optional uint32 storefiles = 3; + boolean hasStorefiles(); + int getStorefiles(); + + // optional uint32 storeUncompressedSizeMB = 4; + boolean hasStoreUncompressedSizeMB(); + int getStoreUncompressedSizeMB(); + + // optional uint32 storefileSizeMB = 5; + boolean hasStorefileSizeMB(); + int getStorefileSizeMB(); + + // optional uint32 memstoreSizeMB = 6; + boolean hasMemstoreSizeMB(); + int getMemstoreSizeMB(); + + // optional uint32 storefileIndexSizeMB = 7; + boolean hasStorefileIndexSizeMB(); + int getStorefileIndexSizeMB(); + + // optional uint64 readRequestsCount = 8; + boolean hasReadRequestsCount(); + long getReadRequestsCount(); + + // optional uint64 writeRequestsCount = 9; + boolean hasWriteRequestsCount(); + long getWriteRequestsCount(); + + // optional uint64 totalCompactingKVs = 10; + boolean hasTotalCompactingKVs(); + long getTotalCompactingKVs(); + + // optional uint64 currentCompactedKVs = 11; + boolean hasCurrentCompactedKVs(); + long getCurrentCompactedKVs(); + + // optional uint32 rootIndexSizeKB = 12; + boolean hasRootIndexSizeKB(); + int getRootIndexSizeKB(); + + // optional uint32 totalStaticIndexSizeKB = 13; + boolean hasTotalStaticIndexSizeKB(); + int getTotalStaticIndexSizeKB(); + + // optional uint32 totalStaticBloomSizeKB = 14; + boolean hasTotalStaticBloomSizeKB(); + int getTotalStaticBloomSizeKB(); + + // repeated .Coprocessor coprocessors = 15; + java.util.List + getCoprocessorsList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index); + int getCoprocessorsCount(); + java.util.List + getCoprocessorsOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( + int index); + + // optional uint64 completeSequenceId = 16; + boolean hasCompleteSequenceId(); + long getCompleteSequenceId(); + } + public static final class RegionLoad extends + com.google.protobuf.GeneratedMessage + implements RegionLoadOrBuilder { + // Use RegionLoad.newBuilder() to construct. 
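+ // Usage sketch (illustrative comment, not generated by protoc): RegionLoad carries per-region metrics behind a required RegionSpecifier; assuming its Builder follows the same generated pattern as the RegionSpecifier.Builder above, a load report might be assembled as follows (the region name and count are made-up values): + //   RegionLoad load = RegionLoad.newBuilder() + //       .setRegionSpecifier(RegionSpecifier.newBuilder() + //           .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME) + //           .setValue(com.google.protobuf.ByteString.copyFromUtf8("t1,,1")) + //           .build()) + //       .setReadRequestsCount(42L)  // setter assumed from the generated optional-uint64 pattern + //       .build();  // fails while the required regionSpecifier is missing or uninitialized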
+ private RegionLoad(Builder builder) { + super(builder); + } + private RegionLoad(boolean noInit) {} + + private static final RegionLoad defaultInstance; + public static RegionLoad getDefaultInstance() { + return defaultInstance; + } + + public RegionLoad getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier regionSpecifier = 1; + public static final int REGIONSPECIFIER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_; + public boolean hasRegionSpecifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { + return regionSpecifier_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { + return regionSpecifier_; + } + + // optional uint32 stores = 2; + public static final int STORES_FIELD_NUMBER = 2; + private int stores_; + public boolean hasStores() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getStores() { + return stores_; + } + + // optional uint32 storefiles = 3; + public static final int STOREFILES_FIELD_NUMBER = 3; + private int storefiles_; + public boolean hasStorefiles() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getStorefiles() { + return storefiles_; + } + + // optional uint32 storeUncompressedSizeMB = 4; + public static final int STOREUNCOMPRESSEDSIZEMB_FIELD_NUMBER = 4; + private int storeUncompressedSizeMB_; + public boolean hasStoreUncompressedSizeMB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getStoreUncompressedSizeMB() { + return storeUncompressedSizeMB_; + } + + // optional uint32 storefileSizeMB = 5; + public static final int STOREFILESIZEMB_FIELD_NUMBER = 5; + private int storefileSizeMB_; + public boolean hasStorefileSizeMB() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getStorefileSizeMB() { + return storefileSizeMB_; + } + + // optional uint32 memstoreSizeMB = 6; + public static final int MEMSTORESIZEMB_FIELD_NUMBER = 6; + private int memstoreSizeMB_; + public boolean hasMemstoreSizeMB() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public int getMemstoreSizeMB() { + return memstoreSizeMB_; + } + + // optional uint32 storefileIndexSizeMB = 7; + public static final int STOREFILEINDEXSIZEMB_FIELD_NUMBER = 7; + private int storefileIndexSizeMB_; + public boolean hasStorefileIndexSizeMB() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getStorefileIndexSizeMB() { + return storefileIndexSizeMB_; + } + + // optional uint64 readRequestsCount = 8; + public static final int READREQUESTSCOUNT_FIELD_NUMBER = 8; + private long readRequestsCount_; + public boolean hasReadRequestsCount() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public long getReadRequestsCount() { + return readRequestsCount_; + } + + // optional uint64 writeRequestsCount = 9; + public static final int WRITEREQUESTSCOUNT_FIELD_NUMBER = 
9; + private long writeRequestsCount_; + public boolean hasWriteRequestsCount() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public long getWriteRequestsCount() { + return writeRequestsCount_; + } + + // optional uint64 totalCompactingKVs = 10; + public static final int TOTALCOMPACTINGKVS_FIELD_NUMBER = 10; + private long totalCompactingKVs_; + public boolean hasTotalCompactingKVs() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + public long getTotalCompactingKVs() { + return totalCompactingKVs_; + } + + // optional uint64 currentCompactedKVs = 11; + public static final int CURRENTCOMPACTEDKVS_FIELD_NUMBER = 11; + private long currentCompactedKVs_; + public boolean hasCurrentCompactedKVs() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + public long getCurrentCompactedKVs() { + return currentCompactedKVs_; + } + + // optional uint32 rootIndexSizeKB = 12; + public static final int ROOTINDEXSIZEKB_FIELD_NUMBER = 12; + private int rootIndexSizeKB_; + public boolean hasRootIndexSizeKB() { + return ((bitField0_ & 0x00000800) == 0x00000800); + } + public int getRootIndexSizeKB() { + return rootIndexSizeKB_; + } + + // optional uint32 totalStaticIndexSizeKB = 13; + public static final int TOTALSTATICINDEXSIZEKB_FIELD_NUMBER = 13; + private int totalStaticIndexSizeKB_; + public boolean hasTotalStaticIndexSizeKB() { + return ((bitField0_ & 0x00001000) == 0x00001000); + } + public int getTotalStaticIndexSizeKB() { + return totalStaticIndexSizeKB_; + } + + // optional uint32 totalStaticBloomSizeKB = 14; + public static final int TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER = 14; + private int totalStaticBloomSizeKB_; + public boolean hasTotalStaticBloomSizeKB() { + return ((bitField0_ & 0x00002000) == 0x00002000); + } + public int getTotalStaticBloomSizeKB() { + return totalStaticBloomSizeKB_; + } + + // repeated .Coprocessor coprocessors = 15; + public static final int COPROCESSORS_FIELD_NUMBER = 15; + private java.util.List coprocessors_; + public java.util.List getCoprocessorsList() { + return coprocessors_; + } + public java.util.List + getCoprocessorsOrBuilderList() { + return coprocessors_; + } + public int getCoprocessorsCount() { + return coprocessors_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { + return coprocessors_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( + int index) { + return coprocessors_.get(index); + } + + // optional uint64 completeSequenceId = 16; + public static final int COMPLETESEQUENCEID_FIELD_NUMBER = 16; + private long completeSequenceId_; + public boolean hasCompleteSequenceId() { + return ((bitField0_ & 0x00004000) == 0x00004000); + } + public long getCompleteSequenceId() { + return completeSequenceId_; + } + + private void initFields() { + regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + stores_ = 0; + storefiles_ = 0; + storeUncompressedSizeMB_ = 0; + storefileSizeMB_ = 0; + memstoreSizeMB_ = 0; + storefileIndexSizeMB_ = 0; + readRequestsCount_ = 0L; + writeRequestsCount_ = 0L; + totalCompactingKVs_ = 0L; + currentCompactedKVs_ = 0L; + rootIndexSizeKB_ = 0; + totalStaticIndexSizeKB_ = 0; + totalStaticBloomSizeKB_ = 0; + coprocessors_ = java.util.Collections.emptyList(); + completeSequenceId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionSpecifier()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegionSpecifier().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getCoprocessorsCount(); i++) { + if (!getCoprocessors(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, regionSpecifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, stores_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt32(3, storefiles_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt32(4, storeUncompressedSizeMB_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt32(5, storefileSizeMB_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeUInt32(6, memstoreSizeMB_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt32(7, storefileIndexSizeMB_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeUInt64(8, readRequestsCount_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + output.writeUInt64(9, writeRequestsCount_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + output.writeUInt64(10, totalCompactingKVs_); + } + if (((bitField0_ & 0x00000400) == 0x00000400)) { + output.writeUInt64(11, currentCompactedKVs_); + } + if (((bitField0_ & 0x00000800) == 0x00000800)) { + output.writeUInt32(12, rootIndexSizeKB_); + } + if (((bitField0_ & 0x00001000) == 0x00001000)) { + output.writeUInt32(13, totalStaticIndexSizeKB_); + } + if (((bitField0_ & 0x00002000) == 0x00002000)) { + output.writeUInt32(14, totalStaticBloomSizeKB_); + } + for (int i = 0; i < coprocessors_.size(); i++) { + output.writeMessage(15, coprocessors_.get(i)); + } + if (((bitField0_ & 0x00004000) == 0x00004000)) { + output.writeUInt64(16, completeSequenceId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionSpecifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, stores_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(3, storefiles_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, storeUncompressedSizeMB_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(5, storefileSizeMB_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(6, memstoreSizeMB_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, storefileIndexSizeMB_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(8, readRequestsCount_); 
+ } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(9, writeRequestsCount_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(10, totalCompactingKVs_); + } + if (((bitField0_ & 0x00000400) == 0x00000400)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(11, currentCompactedKVs_); + } + if (((bitField0_ & 0x00000800) == 0x00000800)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(12, rootIndexSizeKB_); + } + if (((bitField0_ & 0x00001000) == 0x00001000)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(13, totalStaticIndexSizeKB_); + } + if (((bitField0_ & 0x00002000) == 0x00002000)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(14, totalStaticBloomSizeKB_); + } + for (int i = 0; i < coprocessors_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(15, coprocessors_.get(i)); + } + if (((bitField0_ & 0x00004000) == 0x00004000)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(16, completeSequenceId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) obj; + + boolean result = true; + result = result && (hasRegionSpecifier() == other.hasRegionSpecifier()); + if (hasRegionSpecifier()) { + result = result && getRegionSpecifier() + .equals(other.getRegionSpecifier()); + } + result = result && (hasStores() == other.hasStores()); + if (hasStores()) { + result = result && (getStores() + == other.getStores()); + } + result = result && (hasStorefiles() == other.hasStorefiles()); + if (hasStorefiles()) { + result = result && (getStorefiles() + == other.getStorefiles()); + } + result = result && (hasStoreUncompressedSizeMB() == other.hasStoreUncompressedSizeMB()); + if (hasStoreUncompressedSizeMB()) { + result = result && (getStoreUncompressedSizeMB() + == other.getStoreUncompressedSizeMB()); + } + result = result && (hasStorefileSizeMB() == other.hasStorefileSizeMB()); + if (hasStorefileSizeMB()) { + result = result && (getStorefileSizeMB() + == other.getStorefileSizeMB()); + } + result = result && (hasMemstoreSizeMB() == other.hasMemstoreSizeMB()); + if (hasMemstoreSizeMB()) { + result = result && (getMemstoreSizeMB() + == other.getMemstoreSizeMB()); + } + result = result && (hasStorefileIndexSizeMB() == other.hasStorefileIndexSizeMB()); + if (hasStorefileIndexSizeMB()) { + result = result && (getStorefileIndexSizeMB() + == other.getStorefileIndexSizeMB()); + } + result = result && (hasReadRequestsCount() == other.hasReadRequestsCount()); + if (hasReadRequestsCount()) { + result = result && (getReadRequestsCount() + == other.getReadRequestsCount()); + } + result = result && (hasWriteRequestsCount() == other.hasWriteRequestsCount()); + if (hasWriteRequestsCount()) { + result = result && 
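// (Editorial note: equals() checks each optional field's presence flag
// before its value, so an unset field never compares equal to one that was
// explicitly set to the proto default.)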
(getWriteRequestsCount() + == other.getWriteRequestsCount()); + } + result = result && (hasTotalCompactingKVs() == other.hasTotalCompactingKVs()); + if (hasTotalCompactingKVs()) { + result = result && (getTotalCompactingKVs() + == other.getTotalCompactingKVs()); + } + result = result && (hasCurrentCompactedKVs() == other.hasCurrentCompactedKVs()); + if (hasCurrentCompactedKVs()) { + result = result && (getCurrentCompactedKVs() + == other.getCurrentCompactedKVs()); + } + result = result && (hasRootIndexSizeKB() == other.hasRootIndexSizeKB()); + if (hasRootIndexSizeKB()) { + result = result && (getRootIndexSizeKB() + == other.getRootIndexSizeKB()); + } + result = result && (hasTotalStaticIndexSizeKB() == other.hasTotalStaticIndexSizeKB()); + if (hasTotalStaticIndexSizeKB()) { + result = result && (getTotalStaticIndexSizeKB() + == other.getTotalStaticIndexSizeKB()); + } + result = result && (hasTotalStaticBloomSizeKB() == other.hasTotalStaticBloomSizeKB()); + if (hasTotalStaticBloomSizeKB()) { + result = result && (getTotalStaticBloomSizeKB() + == other.getTotalStaticBloomSizeKB()); + } + result = result && getCoprocessorsList() + .equals(other.getCoprocessorsList()); + result = result && (hasCompleteSequenceId() == other.hasCompleteSequenceId()); + if (hasCompleteSequenceId()) { + result = result && (getCompleteSequenceId() + == other.getCompleteSequenceId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionSpecifier()) { + hash = (37 * hash) + REGIONSPECIFIER_FIELD_NUMBER; + hash = (53 * hash) + getRegionSpecifier().hashCode(); + } + if (hasStores()) { + hash = (37 * hash) + STORES_FIELD_NUMBER; + hash = (53 * hash) + getStores(); + } + if (hasStorefiles()) { + hash = (37 * hash) + STOREFILES_FIELD_NUMBER; + hash = (53 * hash) + getStorefiles(); + } + if (hasStoreUncompressedSizeMB()) { + hash = (37 * hash) + STOREUNCOMPRESSEDSIZEMB_FIELD_NUMBER; + hash = (53 * hash) + getStoreUncompressedSizeMB(); + } + if (hasStorefileSizeMB()) { + hash = (37 * hash) + STOREFILESIZEMB_FIELD_NUMBER; + hash = (53 * hash) + getStorefileSizeMB(); + } + if (hasMemstoreSizeMB()) { + hash = (37 * hash) + MEMSTORESIZEMB_FIELD_NUMBER; + hash = (53 * hash) + getMemstoreSizeMB(); + } + if (hasStorefileIndexSizeMB()) { + hash = (37 * hash) + STOREFILEINDEXSIZEMB_FIELD_NUMBER; + hash = (53 * hash) + getStorefileIndexSizeMB(); + } + if (hasReadRequestsCount()) { + hash = (37 * hash) + READREQUESTSCOUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getReadRequestsCount()); + } + if (hasWriteRequestsCount()) { + hash = (37 * hash) + WRITEREQUESTSCOUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getWriteRequestsCount()); + } + if (hasTotalCompactingKVs()) { + hash = (37 * hash) + TOTALCOMPACTINGKVS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTotalCompactingKVs()); + } + if (hasCurrentCompactedKVs()) { + hash = (37 * hash) + CURRENTCOMPACTEDKVS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCurrentCompactedKVs()); + } + if (hasRootIndexSizeKB()) { + hash = (37 * hash) + ROOTINDEXSIZEKB_FIELD_NUMBER; + hash = (53 * hash) + getRootIndexSizeKB(); + } + if (hasTotalStaticIndexSizeKB()) { + hash = (37 * hash) + TOTALSTATICINDEXSIZEKB_FIELD_NUMBER; + hash = (53 * hash) + getTotalStaticIndexSizeKB(); + } + if (hasTotalStaticBloomSizeKB()) { + hash = (37 * hash) + TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER; + hash = (53 * hash) + 
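// (Editorial note: hashCode() folds every set field in as
// hash = 37 * hash + FIELD_NUMBER followed by hash = 53 * hash + value,
// keeping it consistent with the presence-aware equals() above.)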
getTotalStaticBloomSizeKB(); + } + if (getCoprocessorsCount() > 0) { + hash = (37 * hash) + COPROCESSORS_FIELD_NUMBER; + hash = (53 * hash) + getCoprocessorsList().hashCode(); + } + if (hasCompleteSequenceId()) { + hash = (37 * hash) + COMPLETESEQUENCEID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCompleteSequenceId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad prototype) { + return newBuilder().mergeFrom(prototype); + } 
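// Editorial sketch, NOT generated code: a hypothetical helper illustrating the
// static parse + builder round-trip API declared above. parseFrom(byte[]) and
// newBuilder(prototype) are real members of this class; the helper name and
// its use are assumptions for illustration only.
public static RegionLoad withCompleteSequenceId(byte[] wire, long seqId)
    throws com.google.protobuf.InvalidProtocolBufferException {
  RegionLoad parsed = RegionLoad.parseFrom(wire); // parses and validates required fields
  return RegionLoad.newBuilder(parsed)            // copies all set fields into a Builder
      .setCompleteSequenceId(seqId)               // Builder setter defined further below
      .build();                                   // re-checks the required regionSpecifier
}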
+ public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionSpecifierFieldBuilder(); + getCoprocessorsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionSpecifierBuilder_ == null) { + regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionSpecifierBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + stores_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + storefiles_ = 0; + bitField0_ = (bitField0_ & ~0x00000004); + storeUncompressedSizeMB_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + storefileSizeMB_ = 0; + bitField0_ = (bitField0_ & ~0x00000010); + memstoreSizeMB_ = 0; + bitField0_ = (bitField0_ & ~0x00000020); + storefileIndexSizeMB_ = 0; + bitField0_ = (bitField0_ & ~0x00000040); + readRequestsCount_ = 0L; + bitField0_ = (bitField0_ & ~0x00000080); + writeRequestsCount_ = 0L; + bitField0_ = (bitField0_ & ~0x00000100); + totalCompactingKVs_ = 0L; + bitField0_ = (bitField0_ & ~0x00000200); + currentCompactedKVs_ = 0L; + bitField0_ = (bitField0_ & ~0x00000400); + rootIndexSizeKB_ = 0; + bitField0_ = (bitField0_ & ~0x00000800); + totalStaticIndexSizeKB_ = 0; + bitField0_ = (bitField0_ & ~0x00001000); + totalStaticBloomSizeKB_ = 0; + bitField0_ = (bitField0_ & ~0x00002000); + if (coprocessorsBuilder_ == null) { + coprocessors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00004000); + } else { + coprocessorsBuilder_.clear(); + } + completeSequenceId_ = 0L; + bitField0_ = (bitField0_ & ~0x00008000); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = buildPartial(); + if (!result.isInitialized()) { + throw 
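// (Editorial note: build() throws when the message is uninitialized, i.e. the
// required regionSpecifier is missing; buildParsed() below converts the same
// condition into an InvalidProtocolBufferException for the parse entry
// points.)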
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionSpecifierBuilder_ == null) { + result.regionSpecifier_ = regionSpecifier_; + } else { + result.regionSpecifier_ = regionSpecifierBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.stores_ = stores_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.storefiles_ = storefiles_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.storeUncompressedSizeMB_ = storeUncompressedSizeMB_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.storefileSizeMB_ = storefileSizeMB_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.memstoreSizeMB_ = memstoreSizeMB_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000040; + } + result.storefileIndexSizeMB_ = storefileIndexSizeMB_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000080; + } + result.readRequestsCount_ = readRequestsCount_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000100; + } + result.writeRequestsCount_ = writeRequestsCount_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000200; + } + result.totalCompactingKVs_ = totalCompactingKVs_; + if (((from_bitField0_ & 0x00000400) == 0x00000400)) { + to_bitField0_ |= 0x00000400; + } + result.currentCompactedKVs_ = currentCompactedKVs_; + if (((from_bitField0_ & 0x00000800) == 0x00000800)) { + to_bitField0_ |= 0x00000800; + } + result.rootIndexSizeKB_ = rootIndexSizeKB_; + if (((from_bitField0_ & 0x00001000) == 0x00001000)) { + to_bitField0_ |= 0x00001000; + } + result.totalStaticIndexSizeKB_ = totalStaticIndexSizeKB_; + if (((from_bitField0_ & 0x00002000) == 0x00002000)) { + to_bitField0_ |= 0x00002000; + } + result.totalStaticBloomSizeKB_ = totalStaticBloomSizeKB_; + if (coprocessorsBuilder_ == null) { + if (((bitField0_ & 0x00004000) == 0x00004000)) { + coprocessors_ = java.util.Collections.unmodifiableList(coprocessors_); + bitField0_ = (bitField0_ & ~0x00004000); + } + result.coprocessors_ = coprocessors_; + } else { + result.coprocessors_ = coprocessorsBuilder_.build(); + } + if (((from_bitField0_ & 0x00008000) == 0x00008000)) { + to_bitField0_ |= 0x00004000; + } + result.completeSequenceId_ = completeSequenceId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()) return this; + if (other.hasRegionSpecifier()) { + mergeRegionSpecifier(other.getRegionSpecifier()); + } + if (other.hasStores()) { + setStores(other.getStores()); + } + if (other.hasStorefiles()) { + setStorefiles(other.getStorefiles()); + } + if (other.hasStoreUncompressedSizeMB()) { + setStoreUncompressedSizeMB(other.getStoreUncompressedSizeMB()); + } + if (other.hasStorefileSizeMB()) { + setStorefileSizeMB(other.getStorefileSizeMB()); + } + if (other.hasMemstoreSizeMB()) { + setMemstoreSizeMB(other.getMemstoreSizeMB()); + } + if (other.hasStorefileIndexSizeMB()) { + setStorefileIndexSizeMB(other.getStorefileIndexSizeMB()); + } + if (other.hasReadRequestsCount()) { + setReadRequestsCount(other.getReadRequestsCount()); + } + if (other.hasWriteRequestsCount()) { + setWriteRequestsCount(other.getWriteRequestsCount()); + } + if (other.hasTotalCompactingKVs()) { + setTotalCompactingKVs(other.getTotalCompactingKVs()); + } + if (other.hasCurrentCompactedKVs()) { + setCurrentCompactedKVs(other.getCurrentCompactedKVs()); + } + if (other.hasRootIndexSizeKB()) { + setRootIndexSizeKB(other.getRootIndexSizeKB()); + } + if (other.hasTotalStaticIndexSizeKB()) { + setTotalStaticIndexSizeKB(other.getTotalStaticIndexSizeKB()); + } + if (other.hasTotalStaticBloomSizeKB()) { + setTotalStaticBloomSizeKB(other.getTotalStaticBloomSizeKB()); + } + if (coprocessorsBuilder_ == null) { + if (!other.coprocessors_.isEmpty()) { + if (coprocessors_.isEmpty()) { + coprocessors_ = other.coprocessors_; + bitField0_ = (bitField0_ & ~0x00004000); + } else { + ensureCoprocessorsIsMutable(); + coprocessors_.addAll(other.coprocessors_); + } + onChanged(); + } + } else { + if (!other.coprocessors_.isEmpty()) { + if (coprocessorsBuilder_.isEmpty()) { + coprocessorsBuilder_.dispose(); + coprocessorsBuilder_ = null; + coprocessors_ = other.coprocessors_; + bitField0_ = (bitField0_ & ~0x00004000); + coprocessorsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getCoprocessorsFieldBuilder() : null; + } else { + coprocessorsBuilder_.addAllMessages(other.coprocessors_); + } + } + } + if (other.hasCompleteSequenceId()) { + setCompleteSequenceId(other.getCompleteSequenceId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionSpecifier()) { + + return false; + } + if (!getRegionSpecifier().isInitialized()) { + + return false; + } + for (int i = 0; i < getCoprocessorsCount(); i++) { + if (!getCoprocessors(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegionSpecifier()) { + subBuilder.mergeFrom(getRegionSpecifier()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegionSpecifier(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + stores_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + storefiles_ = input.readUInt32(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + storeUncompressedSizeMB_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + storefileSizeMB_ = input.readUInt32(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + memstoreSizeMB_ = input.readUInt32(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + storefileIndexSizeMB_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + readRequestsCount_ = input.readUInt64(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + writeRequestsCount_ = input.readUInt64(); + break; + } + case 80: { + bitField0_ |= 0x00000200; + totalCompactingKVs_ = input.readUInt64(); + break; + } + case 88: { + bitField0_ |= 0x00000400; + currentCompactedKVs_ = input.readUInt64(); + break; + } + case 96: { + bitField0_ |= 0x00000800; + rootIndexSizeKB_ = input.readUInt32(); + break; + } + case 104: { + bitField0_ |= 0x00001000; + totalStaticIndexSizeKB_ = input.readUInt32(); + break; + } + case 112: { + bitField0_ |= 0x00002000; + totalStaticBloomSizeKB_ = input.readUInt32(); + break; + } + case 122: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addCoprocessors(subBuilder.buildPartial()); + break; + } + case 128: { + bitField0_ |= 0x00008000; + completeSequenceId_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier regionSpecifier = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionSpecifierBuilder_; + public boolean hasRegionSpecifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { + if (regionSpecifierBuilder_ == null) { + return regionSpecifier_; + } else { + return regionSpecifierBuilder_.getMessage(); + } + } + public Builder setRegionSpecifier(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionSpecifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + regionSpecifier_ = value; + onChanged(); + } else { + regionSpecifierBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegionSpecifier( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionSpecifierBuilder_ == null) { + regionSpecifier_ = builderForValue.build(); + onChanged(); + } else { + regionSpecifierBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegionSpecifier(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionSpecifierBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + regionSpecifier_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + regionSpecifier_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionSpecifier_).mergeFrom(value).buildPartial(); + } else { + regionSpecifier_ = value; + } + onChanged(); + } else { + regionSpecifierBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegionSpecifier() { + if (regionSpecifierBuilder_ == null) { + regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionSpecifierBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionSpecifierBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionSpecifierFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { + if (regionSpecifierBuilder_ != null) { + return regionSpecifierBuilder_.getMessageOrBuilder(); + } else { + return regionSpecifier_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionSpecifierFieldBuilder() { + if (regionSpecifierBuilder_ == null) { + regionSpecifierBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + regionSpecifier_, + getParentForChildren(), + isClean()); + regionSpecifier_ = null; + } + return regionSpecifierBuilder_; + } + + // optional uint32 stores = 2; + private int stores_ ; + public boolean hasStores() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getStores() { + return stores_; + } + public Builder setStores(int value) { + bitField0_ |= 0x00000002; + stores_ = value; + onChanged(); + return this; + } + public Builder clearStores() { + bitField0_ = (bitField0_ & ~0x00000002); + stores_ = 0; + onChanged(); + return this; + } + + // optional uint32 storefiles = 3; + private int storefiles_ ; + public boolean hasStorefiles() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getStorefiles() { + return storefiles_; + } + public Builder setStorefiles(int value) { + bitField0_ |= 0x00000004; + storefiles_ = value; + onChanged(); + return this; + } + public Builder clearStorefiles() { + bitField0_ = (bitField0_ & ~0x00000004); + storefiles_ = 0; + onChanged(); + return this; + } + + // optional uint32 storeUncompressedSizeMB = 4; + private int storeUncompressedSizeMB_ ; + public boolean hasStoreUncompressedSizeMB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getStoreUncompressedSizeMB() { + return storeUncompressedSizeMB_; + } + public Builder setStoreUncompressedSizeMB(int value) { + bitField0_ |= 0x00000008; + storeUncompressedSizeMB_ = value; + onChanged(); + return this; + } + public Builder clearStoreUncompressedSizeMB() { + bitField0_ = (bitField0_ & ~0x00000008); + storeUncompressedSizeMB_ = 0; + onChanged(); + return this; + } + + // optional uint32 storefileSizeMB = 5; + private int storefileSizeMB_ ; + public boolean hasStorefileSizeMB() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getStorefileSizeMB() { + return storefileSizeMB_; + } + public Builder setStorefileSizeMB(int value) { + bitField0_ |= 0x00000010; + storefileSizeMB_ = value; + onChanged(); + return this; + } + public Builder clearStorefileSizeMB() { + bitField0_ = (bitField0_ & ~0x00000010); + storefileSizeMB_ = 0; + onChanged(); + return this; + } + + // optional uint32 memstoreSizeMB = 6; + private int memstoreSizeMB_ ; + public boolean hasMemstoreSizeMB() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public int getMemstoreSizeMB() { + return memstoreSizeMB_; + } + public Builder setMemstoreSizeMB(int value) { + bitField0_ |= 0x00000020; + memstoreSizeMB_ = value; + onChanged(); + return this; + } + public Builder clearMemstoreSizeMB() { + bitField0_ = (bitField0_ & ~0x00000020); + memstoreSizeMB_ = 0; + onChanged(); + return this; + } + + // optional uint32 storefileIndexSizeMB = 7; + private int storefileIndexSizeMB_ ; + public boolean hasStorefileIndexSizeMB() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getStorefileIndexSizeMB() { + return storefileIndexSizeMB_; + } + public Builder setStorefileIndexSizeMB(int value) { + bitField0_ |= 0x00000040; + storefileIndexSizeMB_ = value; + onChanged(); + return this; + } + public Builder clearStorefileIndexSizeMB() { + bitField0_ = (bitField0_ & ~0x00000040); + storefileIndexSizeMB_ = 0; + onChanged(); + return this; + } + + // optional uint64 readRequestsCount = 8; + private long readRequestsCount_ ; + public boolean hasReadRequestsCount() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public long getReadRequestsCount() { + return 
readRequestsCount_; + } + public Builder setReadRequestsCount(long value) { + bitField0_ |= 0x00000080; + readRequestsCount_ = value; + onChanged(); + return this; + } + public Builder clearReadRequestsCount() { + bitField0_ = (bitField0_ & ~0x00000080); + readRequestsCount_ = 0L; + onChanged(); + return this; + } + + // optional uint64 writeRequestsCount = 9; + private long writeRequestsCount_ ; + public boolean hasWriteRequestsCount() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public long getWriteRequestsCount() { + return writeRequestsCount_; + } + public Builder setWriteRequestsCount(long value) { + bitField0_ |= 0x00000100; + writeRequestsCount_ = value; + onChanged(); + return this; + } + public Builder clearWriteRequestsCount() { + bitField0_ = (bitField0_ & ~0x00000100); + writeRequestsCount_ = 0L; + onChanged(); + return this; + } + + // optional uint64 totalCompactingKVs = 10; + private long totalCompactingKVs_ ; + public boolean hasTotalCompactingKVs() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + public long getTotalCompactingKVs() { + return totalCompactingKVs_; + } + public Builder setTotalCompactingKVs(long value) { + bitField0_ |= 0x00000200; + totalCompactingKVs_ = value; + onChanged(); + return this; + } + public Builder clearTotalCompactingKVs() { + bitField0_ = (bitField0_ & ~0x00000200); + totalCompactingKVs_ = 0L; + onChanged(); + return this; + } + + // optional uint64 currentCompactedKVs = 11; + private long currentCompactedKVs_ ; + public boolean hasCurrentCompactedKVs() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + public long getCurrentCompactedKVs() { + return currentCompactedKVs_; + } + public Builder setCurrentCompactedKVs(long value) { + bitField0_ |= 0x00000400; + currentCompactedKVs_ = value; + onChanged(); + return this; + } + public Builder clearCurrentCompactedKVs() { + bitField0_ = (bitField0_ & ~0x00000400); + currentCompactedKVs_ = 0L; + onChanged(); + return this; + } + + // optional uint32 rootIndexSizeKB = 12; + private int rootIndexSizeKB_ ; + public boolean hasRootIndexSizeKB() { + return ((bitField0_ & 0x00000800) == 0x00000800); + } + public int getRootIndexSizeKB() { + return rootIndexSizeKB_; + } + public Builder setRootIndexSizeKB(int value) { + bitField0_ |= 0x00000800; + rootIndexSizeKB_ = value; + onChanged(); + return this; + } + public Builder clearRootIndexSizeKB() { + bitField0_ = (bitField0_ & ~0x00000800); + rootIndexSizeKB_ = 0; + onChanged(); + return this; + } + + // optional uint32 totalStaticIndexSizeKB = 13; + private int totalStaticIndexSizeKB_ ; + public boolean hasTotalStaticIndexSizeKB() { + return ((bitField0_ & 0x00001000) == 0x00001000); + } + public int getTotalStaticIndexSizeKB() { + return totalStaticIndexSizeKB_; + } + public Builder setTotalStaticIndexSizeKB(int value) { + bitField0_ |= 0x00001000; + totalStaticIndexSizeKB_ = value; + onChanged(); + return this; + } + public Builder clearTotalStaticIndexSizeKB() { + bitField0_ = (bitField0_ & ~0x00001000); + totalStaticIndexSizeKB_ = 0; + onChanged(); + return this; + } + + // optional uint32 totalStaticBloomSizeKB = 14; + private int totalStaticBloomSizeKB_ ; + public boolean hasTotalStaticBloomSizeKB() { + return ((bitField0_ & 0x00002000) == 0x00002000); + } + public int getTotalStaticBloomSizeKB() { + return totalStaticBloomSizeKB_; + } + public Builder setTotalStaticBloomSizeKB(int value) { + bitField0_ |= 0x00002000; + totalStaticBloomSizeKB_ = value; + onChanged(); + return this; + } + public Builder 
clearTotalStaticBloomSizeKB() { + bitField0_ = (bitField0_ & ~0x00002000); + totalStaticBloomSizeKB_ = 0; + onChanged(); + return this; + } + + // repeated .Coprocessor coprocessors = 15; + private java.util.List coprocessors_ = + java.util.Collections.emptyList(); + private void ensureCoprocessorsIsMutable() { + if (!((bitField0_ & 0x00004000) == 0x00004000)) { + coprocessors_ = new java.util.ArrayList(coprocessors_); + bitField0_ |= 0x00004000; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_; + + public java.util.List getCoprocessorsList() { + if (coprocessorsBuilder_ == null) { + return java.util.Collections.unmodifiableList(coprocessors_); + } else { + return coprocessorsBuilder_.getMessageList(); + } + } + public int getCoprocessorsCount() { + if (coprocessorsBuilder_ == null) { + return coprocessors_.size(); + } else { + return coprocessorsBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { + if (coprocessorsBuilder_ == null) { + return coprocessors_.get(index); + } else { + return coprocessorsBuilder_.getMessage(index); + } + } + public Builder setCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (coprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCoprocessorsIsMutable(); + coprocessors_.set(index, value); + onChanged(); + } else { + coprocessorsBuilder_.setMessage(index, value); + } + return this; + } + public Builder setCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.set(index, builderForValue.build()); + onChanged(); + } else { + coprocessorsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (coprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCoprocessorsIsMutable(); + coprocessors_.add(value); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(value); + } + return this; + } + public Builder addCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (coprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCoprocessorsIsMutable(); + coprocessors_.add(index, value); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(index, value); + } + return this; + } + public Builder addCoprocessors( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.add(builderForValue.build()); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.add(index, builderForValue.build()); + onChanged(); + } 
else { + coprocessorsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllCoprocessors( + java.lang.Iterable values) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + super.addAll(values, coprocessors_); + onChanged(); + } else { + coprocessorsBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearCoprocessors() { + if (coprocessorsBuilder_ == null) { + coprocessors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00004000); + onChanged(); + } else { + coprocessorsBuilder_.clear(); + } + return this; + } + public Builder removeCoprocessors(int index) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.remove(index); + onChanged(); + } else { + coprocessorsBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder( + int index) { + return getCoprocessorsFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( + int index) { + if (coprocessorsBuilder_ == null) { + return coprocessors_.get(index); } else { + return coprocessorsBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getCoprocessorsOrBuilderList() { + if (coprocessorsBuilder_ != null) { + return coprocessorsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(coprocessors_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() { + return getCoprocessorsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder( + int index) { + return getCoprocessorsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); + } + public java.util.List + getCoprocessorsBuilderList() { + return getCoprocessorsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> + getCoprocessorsFieldBuilder() { + if (coprocessorsBuilder_ == null) { + coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( + coprocessors_, + ((bitField0_ & 0x00004000) == 0x00004000), + getParentForChildren(), + isClean()); + coprocessors_ = null; + } + return coprocessorsBuilder_; + } + + // optional uint64 completeSequenceId = 16; + private long completeSequenceId_ ; + public boolean hasCompleteSequenceId() { + return ((bitField0_ & 0x00008000) == 0x00008000); + } + public long getCompleteSequenceId() { + return completeSequenceId_; + } + public Builder setCompleteSequenceId(long value) { + bitField0_ |= 0x00008000; + completeSequenceId_ = value; + onChanged(); + return this; + } + public Builder clearCompleteSequenceId() { + bitField0_ = (bitField0_ & ~0x00008000); + completeSequenceId_ = 0L; + onChanged(); 
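// (Editorial note: like every clearX() above, this drops the field's presence
// bit and restores the proto default, so hasCompleteSequenceId() goes back to
// returning false.)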
+ return this; + } + + // @@protoc_insertion_point(builder_scope:RegionLoad) + } + + static { + defaultInstance = new RegionLoad(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionLoad) + } + + public interface ServerLoadOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint32 numberOfRequests = 1; + boolean hasNumberOfRequests(); + int getNumberOfRequests(); + + // optional uint32 totalNumberOfRequests = 2; + boolean hasTotalNumberOfRequests(); + int getTotalNumberOfRequests(); + + // optional uint32 usedHeapMB = 3; + boolean hasUsedHeapMB(); + int getUsedHeapMB(); + + // optional uint32 maxHeapMB = 4; + boolean hasMaxHeapMB(); + int getMaxHeapMB(); + + // repeated .RegionLoad regionLoads = 5; + java.util.List + getRegionLoadsList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index); + int getRegionLoadsCount(); + java.util.List + getRegionLoadsOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( + int index); + + // repeated .Coprocessor coprocessors = 6; + java.util.List + getCoprocessorsList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index); + int getCoprocessorsCount(); + java.util.List + getCoprocessorsOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( + int index); + + // optional uint64 reportStartTime = 7; + boolean hasReportStartTime(); + long getReportStartTime(); + + // optional uint64 reportEndTime = 8; + boolean hasReportEndTime(); + long getReportEndTime(); + + // optional uint32 infoServerPort = 9; + boolean hasInfoServerPort(); + int getInfoServerPort(); + } + public static final class ServerLoad extends + com.google.protobuf.GeneratedMessage + implements ServerLoadOrBuilder { + // Use ServerLoad.newBuilder() to construct. 
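// Editorial sketch, NOT generated code: a hypothetical helper showing how the
// repeated regionLoads field (field 5, declared below) is typically consumed.
// Indexed access keeps the sketch independent of the list's generic type.
static long totalReadRequests(ServerLoad load) {
  long total = 0L;
  for (int i = 0; i < load.getRegionLoadsCount(); i++) {
    RegionLoad rl = load.getRegionLoads(i);       // per-region load report
    if (rl.hasReadRequestsCount()) {              // optional uint64, field 8
      total += rl.getReadRequestsCount();
    }
  }
  return total;
}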
+ private ServerLoad(Builder builder) { + super(builder); + } + private ServerLoad(boolean noInit) {} + + private static final ServerLoad defaultInstance; + public static ServerLoad getDefaultInstance() { + return defaultInstance; + } + + public ServerLoad getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable; + } + + private int bitField0_; + // optional uint32 numberOfRequests = 1; + public static final int NUMBEROFREQUESTS_FIELD_NUMBER = 1; + private int numberOfRequests_; + public boolean hasNumberOfRequests() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getNumberOfRequests() { + return numberOfRequests_; + } + + // optional uint32 totalNumberOfRequests = 2; + public static final int TOTALNUMBEROFREQUESTS_FIELD_NUMBER = 2; + private int totalNumberOfRequests_; + public boolean hasTotalNumberOfRequests() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTotalNumberOfRequests() { + return totalNumberOfRequests_; + } + + // optional uint32 usedHeapMB = 3; + public static final int USEDHEAPMB_FIELD_NUMBER = 3; + private int usedHeapMB_; + public boolean hasUsedHeapMB() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getUsedHeapMB() { + return usedHeapMB_; + } + + // optional uint32 maxHeapMB = 4; + public static final int MAXHEAPMB_FIELD_NUMBER = 4; + private int maxHeapMB_; + public boolean hasMaxHeapMB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getMaxHeapMB() { + return maxHeapMB_; + } + + // repeated .RegionLoad regionLoads = 5; + public static final int REGIONLOADS_FIELD_NUMBER = 5; + private java.util.List regionLoads_; + public java.util.List getRegionLoadsList() { + return regionLoads_; + } + public java.util.List + getRegionLoadsOrBuilderList() { + return regionLoads_; + } + public int getRegionLoadsCount() { + return regionLoads_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index) { + return regionLoads_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( + int index) { + return regionLoads_.get(index); + } + + // repeated .Coprocessor coprocessors = 6; + public static final int COPROCESSORS_FIELD_NUMBER = 6; + private java.util.List coprocessors_; + public java.util.List getCoprocessorsList() { + return coprocessors_; + } + public java.util.List + getCoprocessorsOrBuilderList() { + return coprocessors_; + } + public int getCoprocessorsCount() { + return coprocessors_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { + return coprocessors_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( + int index) { + return coprocessors_.get(index); + } + + // optional uint64 reportStartTime = 7; + public static final int REPORTSTARTTIME_FIELD_NUMBER = 7; + private long reportStartTime_; + public boolean hasReportStartTime() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public long 
getReportStartTime() { + return reportStartTime_; + } + + // optional uint64 reportEndTime = 8; + public static final int REPORTENDTIME_FIELD_NUMBER = 8; + private long reportEndTime_; + public boolean hasReportEndTime() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public long getReportEndTime() { + return reportEndTime_; + } + + // optional uint32 infoServerPort = 9; + public static final int INFOSERVERPORT_FIELD_NUMBER = 9; + private int infoServerPort_; + public boolean hasInfoServerPort() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getInfoServerPort() { + return infoServerPort_; + } + + private void initFields() { + numberOfRequests_ = 0; + totalNumberOfRequests_ = 0; + usedHeapMB_ = 0; + maxHeapMB_ = 0; + regionLoads_ = java.util.Collections.emptyList(); + coprocessors_ = java.util.Collections.emptyList(); + reportStartTime_ = 0L; + reportEndTime_ = 0L; + infoServerPort_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRegionLoadsCount(); i++) { + if (!getRegionLoads(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getCoprocessorsCount(); i++) { + if (!getCoprocessors(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, numberOfRequests_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, totalNumberOfRequests_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt32(3, usedHeapMB_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt32(4, maxHeapMB_); + } + for (int i = 0; i < regionLoads_.size(); i++) { + output.writeMessage(5, regionLoads_.get(i)); + } + for (int i = 0; i < coprocessors_.size(); i++) { + output.writeMessage(6, coprocessors_.get(i)); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt64(7, reportStartTime_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeUInt64(8, reportEndTime_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt32(9, infoServerPort_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, numberOfRequests_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, totalNumberOfRequests_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(3, usedHeapMB_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, maxHeapMB_); + } + for (int i = 0; i < regionLoads_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, regionLoads_.get(i)); + } + for (int i = 0; i < coprocessors_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, 
coprocessors_.get(i)); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(7, reportStartTime_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(8, reportEndTime_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(9, infoServerPort_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) obj; + + boolean result = true; + result = result && (hasNumberOfRequests() == other.hasNumberOfRequests()); + if (hasNumberOfRequests()) { + result = result && (getNumberOfRequests() + == other.getNumberOfRequests()); + } + result = result && (hasTotalNumberOfRequests() == other.hasTotalNumberOfRequests()); + if (hasTotalNumberOfRequests()) { + result = result && (getTotalNumberOfRequests() + == other.getTotalNumberOfRequests()); + } + result = result && (hasUsedHeapMB() == other.hasUsedHeapMB()); + if (hasUsedHeapMB()) { + result = result && (getUsedHeapMB() + == other.getUsedHeapMB()); + } + result = result && (hasMaxHeapMB() == other.hasMaxHeapMB()); + if (hasMaxHeapMB()) { + result = result && (getMaxHeapMB() + == other.getMaxHeapMB()); + } + result = result && getRegionLoadsList() + .equals(other.getRegionLoadsList()); + result = result && getCoprocessorsList() + .equals(other.getCoprocessorsList()); + result = result && (hasReportStartTime() == other.hasReportStartTime()); + if (hasReportStartTime()) { + result = result && (getReportStartTime() + == other.getReportStartTime()); + } + result = result && (hasReportEndTime() == other.hasReportEndTime()); + if (hasReportEndTime()) { + result = result && (getReportEndTime() + == other.getReportEndTime()); + } + result = result && (hasInfoServerPort() == other.hasInfoServerPort()); + if (hasInfoServerPort()) { + result = result && (getInfoServerPort() + == other.getInfoServerPort()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasNumberOfRequests()) { + hash = (37 * hash) + NUMBEROFREQUESTS_FIELD_NUMBER; + hash = (53 * hash) + getNumberOfRequests(); + } + if (hasTotalNumberOfRequests()) { + hash = (37 * hash) + TOTALNUMBEROFREQUESTS_FIELD_NUMBER; + hash = (53 * hash) + getTotalNumberOfRequests(); + } + if (hasUsedHeapMB()) { + hash = (37 * hash) + USEDHEAPMB_FIELD_NUMBER; + hash = (53 * hash) + getUsedHeapMB(); + } + if (hasMaxHeapMB()) { + hash = (37 * hash) + MAXHEAPMB_FIELD_NUMBER; + hash = (53 * hash) + getMaxHeapMB(); + } + if (getRegionLoadsCount() > 0) { + hash = (37 * hash) + REGIONLOADS_FIELD_NUMBER; + hash = (53 * hash) + getRegionLoadsList().hashCode(); + } + if (getCoprocessorsCount() > 0) { + hash = (37 * hash) + 
COPROCESSORS_FIELD_NUMBER; + hash = (53 * hash) + getCoprocessorsList().hashCode(); + } + if (hasReportStartTime()) { + hash = (37 * hash) + REPORTSTARTTIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getReportStartTime()); + } + if (hasReportEndTime()) { + hash = (37 * hash) + REPORTENDTIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getReportEndTime()); + } + if (hasInfoServerPort()) { + hash = (37 * hash) + INFOSERVERPORT_FIELD_NUMBER; + hash = (53 * hash) + getInfoServerPort(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); 
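// (Editorial note: newBuilderForType() delegates to the static newBuilder()
// factory, and Builder.create() is the only construction path for ServerLoad
// builders.)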
} + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionLoadsFieldBuilder(); + getCoprocessorsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + numberOfRequests_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + totalNumberOfRequests_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + usedHeapMB_ = 0; + bitField0_ = (bitField0_ & ~0x00000004); + maxHeapMB_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + if (regionLoadsBuilder_ == null) { + regionLoads_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + regionLoadsBuilder_.clear(); + } + if (coprocessorsBuilder_ == null) { + coprocessors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + } else { + coprocessorsBuilder_.clear(); + } + reportStartTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000040); + reportEndTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000080); + infoServerPort_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return 
result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.numberOfRequests_ = numberOfRequests_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.totalNumberOfRequests_ = totalNumberOfRequests_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.usedHeapMB_ = usedHeapMB_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.maxHeapMB_ = maxHeapMB_; + if (regionLoadsBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + regionLoads_ = java.util.Collections.unmodifiableList(regionLoads_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.regionLoads_ = regionLoads_; + } else { + result.regionLoads_ = regionLoadsBuilder_.build(); + } + if (coprocessorsBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { + coprocessors_ = java.util.Collections.unmodifiableList(coprocessors_); + bitField0_ = (bitField0_ & ~0x00000020); + } + result.coprocessors_ = coprocessors_; + } else { + result.coprocessors_ = coprocessorsBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.reportStartTime_ = reportStartTime_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.reportEndTime_ = reportEndTime_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000040; + } + result.infoServerPort_ = infoServerPort_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) return this; + if (other.hasNumberOfRequests()) { + setNumberOfRequests(other.getNumberOfRequests()); + } + if (other.hasTotalNumberOfRequests()) { + setTotalNumberOfRequests(other.getTotalNumberOfRequests()); + } + if (other.hasUsedHeapMB()) { + setUsedHeapMB(other.getUsedHeapMB()); + } + if (other.hasMaxHeapMB()) { + setMaxHeapMB(other.getMaxHeapMB()); + } + if (regionLoadsBuilder_ == null) { + if (!other.regionLoads_.isEmpty()) { + if (regionLoads_.isEmpty()) { + regionLoads_ = other.regionLoads_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureRegionLoadsIsMutable(); + regionLoads_.addAll(other.regionLoads_); + } + onChanged(); + } + } else { + if (!other.regionLoads_.isEmpty()) { + if (regionLoadsBuilder_.isEmpty()) { + regionLoadsBuilder_.dispose(); + regionLoadsBuilder_ = null; + regionLoads_ = other.regionLoads_; + bitField0_ = (bitField0_ & ~0x00000010); + regionLoadsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getRegionLoadsFieldBuilder() : null; + } else { + regionLoadsBuilder_.addAllMessages(other.regionLoads_); + } + } + } + if (coprocessorsBuilder_ == null) { + if (!other.coprocessors_.isEmpty()) { + if (coprocessors_.isEmpty()) { + coprocessors_ = other.coprocessors_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureCoprocessorsIsMutable(); + coprocessors_.addAll(other.coprocessors_); + } + onChanged(); + } + } else { + if (!other.coprocessors_.isEmpty()) { + if (coprocessorsBuilder_.isEmpty()) { + coprocessorsBuilder_.dispose(); + coprocessorsBuilder_ = null; + coprocessors_ = other.coprocessors_; + bitField0_ = (bitField0_ & ~0x00000020); + coprocessorsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getCoprocessorsFieldBuilder() : null; + } else { + coprocessorsBuilder_.addAllMessages(other.coprocessors_); + } + } + } + if (other.hasReportStartTime()) { + setReportStartTime(other.getReportStartTime()); + } + if (other.hasReportEndTime()) { + setReportEndTime(other.getReportEndTime()); + } + if (other.hasInfoServerPort()) { + setInfoServerPort(other.getInfoServerPort()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionLoadsCount(); i++) { + if (!getRegionLoads(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getCoprocessorsCount(); i++) { + if (!getCoprocessors(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + numberOfRequests_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + totalNumberOfRequests_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + usedHeapMB_ = input.readUInt32(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + maxHeapMB_ = input.readUInt32(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegionLoads(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addCoprocessors(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + reportStartTime_ = input.readUInt64(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + reportEndTime_ = input.readUInt64(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + infoServerPort_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // optional uint32 numberOfRequests = 1; + private int numberOfRequests_ ; + public boolean 
hasNumberOfRequests() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getNumberOfRequests() { + return numberOfRequests_; + } + public Builder setNumberOfRequests(int value) { + bitField0_ |= 0x00000001; + numberOfRequests_ = value; + onChanged(); + return this; + } + public Builder clearNumberOfRequests() { + bitField0_ = (bitField0_ & ~0x00000001); + numberOfRequests_ = 0; + onChanged(); + return this; + } + + // optional uint32 totalNumberOfRequests = 2; + private int totalNumberOfRequests_ ; + public boolean hasTotalNumberOfRequests() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTotalNumberOfRequests() { + return totalNumberOfRequests_; + } + public Builder setTotalNumberOfRequests(int value) { + bitField0_ |= 0x00000002; + totalNumberOfRequests_ = value; + onChanged(); + return this; + } + public Builder clearTotalNumberOfRequests() { + bitField0_ = (bitField0_ & ~0x00000002); + totalNumberOfRequests_ = 0; + onChanged(); + return this; + } + + // optional uint32 usedHeapMB = 3; + private int usedHeapMB_ ; + public boolean hasUsedHeapMB() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getUsedHeapMB() { + return usedHeapMB_; + } + public Builder setUsedHeapMB(int value) { + bitField0_ |= 0x00000004; + usedHeapMB_ = value; + onChanged(); + return this; + } + public Builder clearUsedHeapMB() { + bitField0_ = (bitField0_ & ~0x00000004); + usedHeapMB_ = 0; + onChanged(); + return this; + } + + // optional uint32 maxHeapMB = 4; + private int maxHeapMB_ ; + public boolean hasMaxHeapMB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getMaxHeapMB() { + return maxHeapMB_; + } + public Builder setMaxHeapMB(int value) { + bitField0_ |= 0x00000008; + maxHeapMB_ = value; + onChanged(); + return this; + } + public Builder clearMaxHeapMB() { + bitField0_ = (bitField0_ & ~0x00000008); + maxHeapMB_ = 0; + onChanged(); + return this; + } + + // repeated .RegionLoad regionLoads = 5; + private java.util.List regionLoads_ = + java.util.Collections.emptyList(); + private void ensureRegionLoadsIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + regionLoads_ = new java.util.ArrayList(regionLoads_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder> regionLoadsBuilder_; + + public java.util.List getRegionLoadsList() { + if (regionLoadsBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionLoads_); + } else { + return regionLoadsBuilder_.getMessageList(); + } + } + public int getRegionLoadsCount() { + if (regionLoadsBuilder_ == null) { + return regionLoads_.size(); + } else { + return regionLoadsBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index) { + if (regionLoadsBuilder_ == null) { + return regionLoads_.get(index); + } else { + return regionLoadsBuilder_.getMessage(index); + } + } + public Builder setRegionLoads( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) { + if (regionLoadsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionLoadsIsMutable(); + regionLoads_.set(index, value); + onChanged(); + } else { + regionLoadsBuilder_.setMessage(index, value); + } + 
return this; + } + public Builder setRegionLoads( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) { + if (regionLoadsBuilder_ == null) { + ensureRegionLoadsIsMutable(); + regionLoads_.set(index, builderForValue.build()); + onChanged(); + } else { + regionLoadsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegionLoads(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) { + if (regionLoadsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionLoadsIsMutable(); + regionLoads_.add(value); + onChanged(); + } else { + regionLoadsBuilder_.addMessage(value); + } + return this; + } + public Builder addRegionLoads( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) { + if (regionLoadsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionLoadsIsMutable(); + regionLoads_.add(index, value); + onChanged(); + } else { + regionLoadsBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegionLoads( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) { + if (regionLoadsBuilder_ == null) { + ensureRegionLoadsIsMutable(); + regionLoads_.add(builderForValue.build()); + onChanged(); + } else { + regionLoadsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegionLoads( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) { + if (regionLoadsBuilder_ == null) { + ensureRegionLoadsIsMutable(); + regionLoads_.add(index, builderForValue.build()); + onChanged(); + } else { + regionLoadsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegionLoads( + java.lang.Iterable values) { + if (regionLoadsBuilder_ == null) { + ensureRegionLoadsIsMutable(); + super.addAll(values, regionLoads_); + onChanged(); + } else { + regionLoadsBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegionLoads() { + if (regionLoadsBuilder_ == null) { + regionLoads_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + regionLoadsBuilder_.clear(); + } + return this; + } + public Builder removeRegionLoads(int index) { + if (regionLoadsBuilder_ == null) { + ensureRegionLoadsIsMutable(); + regionLoads_.remove(index); + onChanged(); + } else { + regionLoadsBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder getRegionLoadsBuilder( + int index) { + return getRegionLoadsFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( + int index) { + if (regionLoadsBuilder_ == null) { + return regionLoads_.get(index); } else { + return regionLoadsBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getRegionLoadsOrBuilderList() { + if (regionLoadsBuilder_ != null) { + return regionLoadsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(regionLoads_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder addRegionLoadsBuilder() { + return getRegionLoadsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()); + } + 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder addRegionLoadsBuilder( + int index) { + return getRegionLoadsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()); + } + public java.util.List + getRegionLoadsBuilderList() { + return getRegionLoadsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder> + getRegionLoadsFieldBuilder() { + if (regionLoadsBuilder_ == null) { + regionLoadsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder>( + regionLoads_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + regionLoads_ = null; + } + return regionLoadsBuilder_; + } + + // repeated .Coprocessor coprocessors = 6; + private java.util.List coprocessors_ = + java.util.Collections.emptyList(); + private void ensureCoprocessorsIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + coprocessors_ = new java.util.ArrayList(coprocessors_); + bitField0_ |= 0x00000020; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_; + + public java.util.List getCoprocessorsList() { + if (coprocessorsBuilder_ == null) { + return java.util.Collections.unmodifiableList(coprocessors_); + } else { + return coprocessorsBuilder_.getMessageList(); + } + } + public int getCoprocessorsCount() { + if (coprocessorsBuilder_ == null) { + return coprocessors_.size(); + } else { + return coprocessorsBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { + if (coprocessorsBuilder_ == null) { + return coprocessors_.get(index); + } else { + return coprocessorsBuilder_.getMessage(index); + } + } + public Builder setCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (coprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCoprocessorsIsMutable(); + coprocessors_.set(index, value); + onChanged(); + } else { + coprocessorsBuilder_.setMessage(index, value); + } + return this; + } + public Builder setCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.set(index, builderForValue.build()); + onChanged(); + } else { + coprocessorsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (coprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCoprocessorsIsMutable(); + coprocessors_.add(value); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(value); + } + return 
this; + } + public Builder addCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { + if (coprocessorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCoprocessorsIsMutable(); + coprocessors_.add(index, value); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(index, value); + } + return this; + } + public Builder addCoprocessors( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.add(builderForValue.build()); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addCoprocessors( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.add(index, builderForValue.build()); + onChanged(); + } else { + coprocessorsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllCoprocessors( + java.lang.Iterable values) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + super.addAll(values, coprocessors_); + onChanged(); + } else { + coprocessorsBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearCoprocessors() { + if (coprocessorsBuilder_ == null) { + coprocessors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + coprocessorsBuilder_.clear(); + } + return this; + } + public Builder removeCoprocessors(int index) { + if (coprocessorsBuilder_ == null) { + ensureCoprocessorsIsMutable(); + coprocessors_.remove(index); + onChanged(); + } else { + coprocessorsBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder( + int index) { + return getCoprocessorsFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( + int index) { + if (coprocessorsBuilder_ == null) { + return coprocessors_.get(index); } else { + return coprocessorsBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getCoprocessorsOrBuilderList() { + if (coprocessorsBuilder_ != null) { + return coprocessorsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(coprocessors_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() { + return getCoprocessorsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder( + int index) { + return getCoprocessorsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); + } + public java.util.List + getCoprocessorsBuilderList() { + return getCoprocessorsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> + 
getCoprocessorsFieldBuilder() { + if (coprocessorsBuilder_ == null) { + coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( + coprocessors_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + coprocessors_ = null; + } + return coprocessorsBuilder_; + } + + // optional uint64 reportStartTime = 7; + private long reportStartTime_ ; + public boolean hasReportStartTime() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public long getReportStartTime() { + return reportStartTime_; + } + public Builder setReportStartTime(long value) { + bitField0_ |= 0x00000040; + reportStartTime_ = value; + onChanged(); + return this; + } + public Builder clearReportStartTime() { + bitField0_ = (bitField0_ & ~0x00000040); + reportStartTime_ = 0L; + onChanged(); + return this; + } + + // optional uint64 reportEndTime = 8; + private long reportEndTime_ ; + public boolean hasReportEndTime() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public long getReportEndTime() { + return reportEndTime_; + } + public Builder setReportEndTime(long value) { + bitField0_ |= 0x00000080; + reportEndTime_ = value; + onChanged(); + return this; + } + public Builder clearReportEndTime() { + bitField0_ = (bitField0_ & ~0x00000080); + reportEndTime_ = 0L; + onChanged(); + return this; + } + + // optional uint32 infoServerPort = 9; + private int infoServerPort_ ; + public boolean hasInfoServerPort() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public int getInfoServerPort() { + return infoServerPort_; + } + public Builder setInfoServerPort(int value) { + bitField0_ |= 0x00000100; + infoServerPort_ = value; + onChanged(); + return this; + } + public Builder clearInfoServerPort() { + bitField0_ = (bitField0_ & ~0x00000100); + infoServerPort_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ServerLoad) + } + + static { + defaultInstance = new ServerLoad(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ServerLoad) + } + + public interface TimeRangeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint64 from = 1; + boolean hasFrom(); + long getFrom(); + + // optional uint64 to = 2; + boolean hasTo(); + long getTo(); + } + public static final class TimeRange extends + com.google.protobuf.GeneratedMessage + implements TimeRangeOrBuilder { + // Use TimeRange.newBuilder() to construct. 
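+ // Illustrative round trip (a minimal sketch; `now` is an assumed epoch-millis value).
+ // Both fields are optional, so build() always succeeds:
+ //   HBaseProtos.TimeRange tr = HBaseProtos.TimeRange.newBuilder()
+ //       .setFrom(0L)
+ //       .setTo(now)
+ //       .build();
+ //   HBaseProtos.TimeRange back = HBaseProtos.TimeRange.parseFrom(tr.toByteArray());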
+ private TimeRange(Builder builder) { + super(builder); + } + private TimeRange(boolean noInit) {} + + private static final TimeRange defaultInstance; + public static TimeRange getDefaultInstance() { + return defaultInstance; + } + + public TimeRange getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable; + } + + private int bitField0_; + // optional uint64 from = 1; + public static final int FROM_FIELD_NUMBER = 1; + private long from_; + public boolean hasFrom() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getFrom() { + return from_; + } + + // optional uint64 to = 2; + public static final int TO_FIELD_NUMBER = 2; + private long to_; + public boolean hasTo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getTo() { + return to_; + } + + private void initFields() { + from_ = 0L; + to_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, from_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, to_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, from_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, to_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) obj; + + boolean result = true; + result = result && (hasFrom() == other.hasFrom()); + if (hasFrom()) { + result = result && (getFrom() + == other.getFrom()); + } + result = result && (hasTo() == other.hasTo()); + if (hasTo()) { + result = result && (getTo() + == other.getTo()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFrom()) { + hash = (37 * hash) + FROM_FIELD_NUMBER; + hash = (53 * hash) + 
hashLong(getFrom()); + } + if (hasTo()) { + hash = (37 * hash) + TO_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTo()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new 
Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + from_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + to_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.from_ = from_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.to_ = to_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this; + if 
(other.hasFrom()) { + setFrom(other.getFrom()); + } + if (other.hasTo()) { + setTo(other.getTo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + from_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + to_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // optional uint64 from = 1; + private long from_ ; + public boolean hasFrom() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getFrom() { + return from_; + } + public Builder setFrom(long value) { + bitField0_ |= 0x00000001; + from_ = value; + onChanged(); + return this; + } + public Builder clearFrom() { + bitField0_ = (bitField0_ & ~0x00000001); + from_ = 0L; + onChanged(); + return this; + } + + // optional uint64 to = 2; + private long to_ ; + public boolean hasTo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getTo() { + return to_; + } + public Builder setTo(long value) { + bitField0_ |= 0x00000002; + to_ = value; + onChanged(); + return this; + } + public Builder clearTo() { + bitField0_ = (bitField0_ & ~0x00000002); + to_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TimeRange) + } + + static { + defaultInstance = new TimeRange(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TimeRange) + } + + public interface FilterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // optional bytes serializedFilter = 2; + boolean hasSerializedFilter(); + com.google.protobuf.ByteString getSerializedFilter(); + } + public static final class Filter extends + com.google.protobuf.GeneratedMessage + implements FilterOrBuilder { + // Use Filter.newBuilder() to construct. 
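+ // Illustrative construction (a minimal sketch; the filter class name and `rawBytes`
+ // are assumed example inputs). Because `name` is declared required, build() throws
+ // UninitializedMessageException if it was never set:
+ //   HBaseProtos.Filter f = HBaseProtos.Filter.newBuilder()
+ //       .setName("org.apache.hadoop.hbase.filter.PrefixFilter")
+ //       .setSerializedFilter(com.google.protobuf.ByteString.copyFrom(rawBytes))
+ //       .build();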
+ private Filter(Builder builder) { + super(builder); + } + private Filter(boolean noInit) {} + + private static final Filter defaultInstance; + public static Filter getDefaultInstance() { + return defaultInstance; + } + + public Filter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes serializedFilter = 2; + public static final int SERIALIZEDFILTER_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString serializedFilter_; + public boolean hasSerializedFilter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSerializedFilter() { + return serializedFilter_; + } + + private void initFields() { + name_ = ""; + serializedFilter_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, serializedFilter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, serializedFilter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return 
true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasSerializedFilter() == other.hasSerializedFilter()); + if (hasSerializedFilter()) { + result = result && getSerializedFilter() + .equals(other.getSerializedFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasSerializedFilter()) { + hash = (37 * hash) + SERIALIZEDFILTER_FIELD_NUMBER; + hash = (53 * hash) + getSerializedFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( 
+ com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + serializedFilter_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = new 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.serializedFilter_ = serializedFilter_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasSerializedFilter()) { + setSerializedFilter(other.getSerializedFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + serializedFilter_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // optional bytes serializedFilter = 2; + private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSerializedFilter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSerializedFilter() { + return serializedFilter_; + } + public Builder setSerializedFilter(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + serializedFilter_ = value; + onChanged(); + return 
this; + } + public Builder clearSerializedFilter() { + bitField0_ = (bitField0_ & ~0x00000002); + serializedFilter_ = getDefaultInstance().getSerializedFilter(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Filter) + } + + static { + defaultInstance = new Filter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Filter) + } + + public interface KeyValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required bytes family = 2; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required bytes qualifier = 3; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // optional uint64 timestamp = 4; + boolean hasTimestamp(); + long getTimestamp(); + + // optional .KeyType keyType = 5; + boolean hasKeyType(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType(); + + // optional bytes value = 6; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class KeyValue extends + com.google.protobuf.GeneratedMessage + implements KeyValueOrBuilder { + // Use KeyValue.newBuilder() to construct. + private KeyValue(Builder builder) { + super(builder); + } + private KeyValue(boolean noInit) {} + + private static final KeyValue defaultInstance; + public static KeyValue getDefaultInstance() { + return defaultInstance; + } + + public KeyValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required bytes family = 2; + public static final int FAMILY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required bytes qualifier = 3; + public static final int QUALIFIER_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // optional uint64 timestamp = 4; + public static final int TIMESTAMP_FIELD_NUMBER = 4; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getTimestamp() { + return timestamp_; + } + + // optional .KeyType keyType = 5; + public static final int KEYTYPE_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType keyType_; + public boolean hasKeyType() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType() { + return 
keyType_; + } + + // optional bytes value = 6; + public static final int VALUE_FIELD_NUMBER = 6; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, timestamp_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeEnum(5, keyType_.getNumber()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBytes(6, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, timestamp_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, keyType_.getNumber()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); 
+ if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasKeyType() == other.hasKeyType()); + if (hasKeyType()) { + result = result && + (getKeyType() == other.getKeyType()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasKeyType()) { + hash = (37 * hash) + KEYTYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getKeyType()); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + bitField0_ = (bitField0_ & ~0x00000010); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.keyType_ = keyType_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasKeyType()) { + setKeyType(other.getKeyType()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasFamily()) { + + return false; + } + if (!hasQualifier()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while 
(true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + timestamp_ = input.readUInt64(); + break; + } + case 40: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(5, rawValue); + } else { + bitField0_ |= 0x00000010; + keyType_ = value; + } + break; + } + case 50: { + bitField0_ |= 0x00000020; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required bytes family = 2; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000002); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required bytes qualifier = 3; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000004); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // optional uint64 timestamp = 4; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000008; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000008); + timestamp_ = 0L; + 
onChanged(); + return this; + } + + // optional .KeyType keyType = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + public boolean hasKeyType() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType() { + return keyType_; + } + public Builder setKeyType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + keyType_ = value; + onChanged(); + return this; + } + public Builder clearKeyType() { + bitField0_ = (bitField0_ & ~0x00000010); + keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + onChanged(); + return this; + } + + // optional bytes value = 6; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000020); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:KeyValue) + } + + static { + defaultInstance = new KeyValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:KeyValue) + } + + public interface ServerNameOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string hostName = 1; + boolean hasHostName(); + String getHostName(); + + // optional uint32 port = 2; + boolean hasPort(); + int getPort(); + + // optional uint64 startCode = 3; + boolean hasStartCode(); + long getStartCode(); + } + public static final class ServerName extends + com.google.protobuf.GeneratedMessage + implements ServerNameOrBuilder { + // Use ServerName.newBuilder() to construct. 
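+
+ // A minimal usage sketch (not emitted by protoc) for the generated
+ // KeyValue message above, assuming only the builder and parse APIs
+ // defined in this file; the row/family/qualifier/timestamp values
+ // are illustrative:
+ //
+ //   HBaseProtos.KeyValue kv = HBaseProtos.KeyValue.newBuilder()
+ //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
+ //       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
+ //       .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
+ //       .setTimestamp(1L)   // optional; defaults to 0L
+ //       .build();           // throws if row/family/qualifier is unset
+ //   HBaseProtos.KeyValue copy =
+ //       HBaseProtos.KeyValue.parseFrom(kv.toByteString());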
+ private ServerName(Builder builder) { + super(builder); + } + private ServerName(boolean noInit) {} + + private static final ServerName defaultInstance; + public static ServerName getDefaultInstance() { + return defaultInstance; + } + + public ServerName getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; + } + + private int bitField0_; + // required string hostName = 1; + public static final int HOSTNAME_FIELD_NUMBER = 1; + private java.lang.Object hostName_; + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getHostName() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + hostName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional uint32 port = 2; + public static final int PORT_FIELD_NUMBER = 2; + private int port_; + public boolean hasPort() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getPort() { + return port_; + } + + // optional uint64 startCode = 3; + public static final int STARTCODE_FIELD_NUMBER = 3; + private long startCode_; + public boolean hasStartCode() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getStartCode() { + return startCode_; + } + + private void initFields() { + hostName_ = ""; + port_ = 0; + startCode_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasHostName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getHostNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, port_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, startCode_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getHostNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, port_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, startCode_); + } 
+ size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) obj; + + boolean result = true; + result = result && (hasHostName() == other.hasHostName()); + if (hasHostName()) { + result = result && getHostName() + .equals(other.getHostName()); + } + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasStartCode() == other.hasStartCode()); + if (hasStartCode()) { + result = result && (getStartCode() + == other.getStartCode()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasHostName()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostName().hashCode(); + } + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasStartCode()) { + hash = (37 * hash) + STARTCODE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getStartCode()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) 
+ throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + hostName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + startCode_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName build() { + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.hostName_ = hostName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.startCode_ = startCode_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; + if (other.hasHostName()) { + setHostName(other.getHostName()); + } + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasStartCode()) { + setStartCode(other.getStartCode()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasHostName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + hostName_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + port_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + startCode_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required string hostName = 1; + private java.lang.Object hostName_ = ""; + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getHostName() { + java.lang.Object ref = hostName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + hostName_ = s; + return 
s; + } else { + return (String) ref; + } + } + public Builder setHostName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + return this; + } + public Builder clearHostName() { + bitField0_ = (bitField0_ & ~0x00000001); + hostName_ = getDefaultInstance().getHostName(); + onChanged(); + return this; + } + void setHostName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + } + + // optional uint32 port = 2; + private int port_ ; + public boolean hasPort() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getPort() { + return port_; + } + public Builder setPort(int value) { + bitField0_ |= 0x00000002; + port_ = value; + onChanged(); + return this; + } + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000002); + port_ = 0; + onChanged(); + return this; + } + + // optional uint64 startCode = 3; + private long startCode_ ; + public boolean hasStartCode() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getStartCode() { + return startCode_; + } + public Builder setStartCode(long value) { + bitField0_ |= 0x00000004; + startCode_ = value; + onChanged(); + return this; + } + public Builder clearStartCode() { + bitField0_ = (bitField0_ & ~0x00000004); + startCode_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ServerName) + } + + static { + defaultInstance = new ServerName(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ServerName) + } + + public interface CoprocessorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + } + public static final class Coprocessor extends + com.google.protobuf.GeneratedMessage + implements CoprocessorOrBuilder { + // Use Coprocessor.newBuilder() to construct. 
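+
+ // A minimal usage sketch (not emitted by protoc) for the generated
+ // ServerName message above, using the delimited-stream helpers this
+ // file defines; the host, port, and start code are illustrative:
+ //
+ //   HBaseProtos.ServerName sn = HBaseProtos.ServerName.newBuilder()
+ //       .setHostName("rs1.example.org")  // required string
+ //       .setPort(60020)                  // optional uint32
+ //       .setStartCode(1234567890L)       // optional uint64
+ //       .build();
+ //   sn.writeDelimitedTo(out);            // any java.io.OutputStream
+ //   HBaseProtos.ServerName back =
+ //       HBaseProtos.ServerName.parseDelimitedFrom(in); // null at EOF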
+ private Coprocessor(Builder builder) { + super(builder); + } + private Coprocessor(boolean noInit) {} + + private static final Coprocessor defaultInstance; + public static Coprocessor getDefaultInstance() { + return defaultInstance; + } + + public Coprocessor getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + name_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * 
hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:Coprocessor) + } + + static { + defaultInstance = new Coprocessor(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Coprocessor) + } + + public interface NameStringPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // required string value = 2; + boolean hasValue(); + String getValue(); + } + public static final class NameStringPair extends + com.google.protobuf.GeneratedMessage + implements NameStringPairOrBuilder { + // Use NameStringPair.newBuilder() to construct. 
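+
+ // A minimal usage sketch (not emitted by protoc) for the generated
+ // Coprocessor message above; the coprocessor class name is
+ // hypothetical:
+ //
+ //   HBaseProtos.Coprocessor cp = HBaseProtos.Coprocessor.newBuilder()
+ //       .setName("org.example.MyRegionObserver")  // required string
+ //       .build();
+ //   // Calling build() without name set throws an
+ //   // UninitializedMessageException; buildPartial() does not.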
+ private NameStringPair(Builder builder) { + super(builder); + } + private NameStringPair(boolean noInit) {} + + private static final NameStringPair defaultInstance; + public static NameStringPair getDefaultInstance() { + return defaultInstance; + } + + public NameStringPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private java.lang.Object value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + value_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + name_ = ""; + value_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getValueBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, 
getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getValueBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void 
setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // required string value = 2; + private java.lang.Object value_ = ""; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + value_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setValue(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + void setValue(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:NameStringPair) + } + + static { + defaultInstance = new NameStringPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NameStringPair) + } + + public interface NameBytesPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class NameBytesPair extends + com.google.protobuf.GeneratedMessage + implements NameBytesPairOrBuilder { + // Use NameBytesPair.newBuilder() to construct. + private NameBytesPair(Builder builder) { + super(builder); + } + private NameBytesPair(boolean noInit) {} + + private static final NameBytesPair defaultInstance; + public static NameBytesPair getDefaultInstance() { + return defaultInstance; + } + + public NameBytesPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + 
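// ---------------------------------------------------------------------
// Editorial note, not generated output: getName()/getNameBytes() above
// show the lazy UTF-8 caching this generator emits for string fields.
// name_ starts out as the ByteString read off the wire; the first
// getName() decodes it and, only when the bytes are valid UTF-8, caches
// the decoded String back into name_ so later calls skip the decode.
// A hand-written sketch of the same idea (field and method names here
// are hypothetical):
//
//   private Object nameRef;                    // String or ByteString
//   String nameAsString() {
//     Object ref = nameRef;
//     if (ref instanceof String) return (String) ref;
//     com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
//     String s = bs.toStringUtf8();
//     if (com.google.protobuf.Internal.isValidUtf8(bs)) {
//       nameRef = s;                           // cache the decoded form
//     }
//     return s;
//   }
// ---------------------------------------------------------------------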
private void initFields() { + name_ = ""; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + 
this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:NameBytesPair) + } + + static { + defaultInstance = new NameBytesPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NameBytesPair) + } + + public interface BytesBytesPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes first = 1; + boolean hasFirst(); + com.google.protobuf.ByteString getFirst(); + + // required bytes second = 2; + boolean hasSecond(); + com.google.protobuf.ByteString getSecond(); + } + public static final class BytesBytesPair extends + com.google.protobuf.GeneratedMessage + implements BytesBytesPairOrBuilder { + // Use BytesBytesPair.newBuilder() to construct. 
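// Editorial sketch, not generated output: both fields of this message
// are declared `required bytes`, so a complete construction looks like
// the following (the row-key values are illustrative placeholders):
//
//   HBaseProtos.BytesBytesPair pair = HBaseProtos.BytesBytesPair.newBuilder()
//       .setFirst(com.google.protobuf.ByteString.copyFromUtf8("start-row"))
//       .setSecond(com.google.protobuf.ByteString.copyFromUtf8("end-row"))
//       .build();  // build() throws if either required field is unset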
+ private BytesBytesPair(Builder builder) { + super(builder); + } + private BytesBytesPair(boolean noInit) {} + + private static final BytesBytesPair defaultInstance; + public static BytesBytesPair getDefaultInstance() { + return defaultInstance; + } + + public BytesBytesPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable; + } + + private int bitField0_; + // required bytes first = 1; + public static final int FIRST_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString first_; + public boolean hasFirst() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFirst() { + return first_; + } + + // required bytes second = 2; + public static final int SECOND_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString second_; + public boolean hasSecond() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSecond() { + return second_; + } + + private void initFields() { + first_ = com.google.protobuf.ByteString.EMPTY; + second_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFirst()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasSecond()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, first_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, second_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, first_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, second_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) obj; + + boolean result = true; + result = result && (hasFirst() == other.hasFirst()); + if (hasFirst()) { + result = result && getFirst() + .equals(other.getFirst()); + } + result = result && 
(hasSecond() == other.hasSecond()); + if (hasSecond()) { + result = result && getSecond() + .equals(other.getSecond()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFirst()) { + hash = (37 * hash) + FIRST_FIELD_NUMBER; + hash = (53 * hash) + getFirst().hashCode(); + } + if (hasSecond()) { + hash = (37 * hash) + SECOND_FIELD_NUMBER; + hash = (53 * hash) + getSecond().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + 
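// Editorial note, not generated output: parseDelimitedFrom above pairs
// with the message's writeDelimitedTo, framing each message with a
// varint length prefix so several can share one stream. Per the code
// above it returns null once mergeDelimitedFrom reports end of stream,
// which gives the usual read loop (the stream and consumer below are
// hypothetical):
//
//   java.io.InputStream in = openPairStream();   // hypothetical source
//   HBaseProtos.BytesBytesPair p;
//   while ((p = HBaseProtos.BytesBytesPair.parseDelimitedFrom(in)) != null) {
//     process(p);                                // hypothetical consumer
//   }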
public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + first_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + second_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.first_ = first_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; 
+ } + result.second_ = second_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()) return this; + if (other.hasFirst()) { + setFirst(other.getFirst()); + } + if (other.hasSecond()) { + setSecond(other.getSecond()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFirst()) { + + return false; + } + if (!hasSecond()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + first_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + second_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes first = 1; + private com.google.protobuf.ByteString first_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFirst() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFirst() { + return first_; + } + public Builder setFirst(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + first_ = value; + onChanged(); + return this; + } + public Builder clearFirst() { + bitField0_ = (bitField0_ & ~0x00000001); + first_ = getDefaultInstance().getFirst(); + onChanged(); + return this; + } + + // required bytes second = 2; + private com.google.protobuf.ByteString second_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSecond() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSecond() { + return second_; + } + public Builder setSecond(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + second_ = value; + onChanged(); + return this; + } + public Builder clearSecond() { + bitField0_ = (bitField0_ & ~0x00000002); + second_ = getDefaultInstance().getSecond(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:BytesBytesPair) + } + + static { + defaultInstance = new BytesBytesPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BytesBytesPair) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TableSchema_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_TableSchema_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TableSchema_Attribute_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TableSchema_Attribute_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ColumnFamilySchema_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ColumnFamilySchema_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ColumnFamilySchema_Attribute_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionInfo_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionSpecifier_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionSpecifier_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionLoad_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionLoad_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ServerLoad_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ServerLoad_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TimeRange_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TimeRange_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Filter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Filter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_KeyValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_KeyValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ServerName_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ServerName_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Coprocessor_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Coprocessor_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NameStringPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NameStringPair_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NameBytesPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NameBytesPair_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BytesBytesPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BytesBytesPair_fieldAccessorTable; + + public static 
com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\013hbase.proto\"\236\001\n\013TableSchema\022\014\n\004name\030\001 " + + "\001(\014\022*\n\nattributes\030\002 \003(\0132\026.TableSchema.At" + + "tribute\022+\n\016columnFamilies\030\003 \003(\0132\023.Column" + + "FamilySchema\032(\n\tAttribute\022\014\n\004name\030\001 \002(\014\022" + + "\r\n\005value\030\002 \002(\014\"\177\n\022ColumnFamilySchema\022\014\n\004" + + "name\030\001 \002(\014\0221\n\nattributes\030\002 \003(\0132\035.ColumnF" + + "amilySchema.Attribute\032(\n\tAttribute\022\014\n\004na" + + "me\030\001 \002(\014\022\r\n\005value\030\002 \002(\014\"s\n\nRegionInfo\022\020\n" + + "\010regionId\030\001 \002(\004\022\021\n\ttableName\030\002 \002(\014\022\020\n\010st" + + "artKey\030\003 \001(\014\022\016\n\006endKey\030\004 \001(\014\022\017\n\007offline\030", + "\005 \001(\010\022\r\n\005split\030\006 \001(\010\"\225\001\n\017RegionSpecifier" + + "\0222\n\004type\030\001 \002(\0162$.RegionSpecifier.RegionS" + + "pecifierType\022\r\n\005value\030\002 \002(\014\"?\n\023RegionSpe" + + "cifierType\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCODED_R" + + "EGION_NAME\020\002\"\324\003\n\nRegionLoad\022)\n\017regionSpe" + + "cifier\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006stores" + + "\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(\r\022\037\n\027storeUncom" + + "pressedSizeMB\030\004 \001(\r\022\027\n\017storefileSizeMB\030\005" + + " \001(\r\022\026\n\016memstoreSizeMB\030\006 \001(\r\022\034\n\024storefil" + + "eIndexSizeMB\030\007 \001(\r\022\031\n\021readRequestsCount\030", + "\010 \001(\004\022\032\n\022writeRequestsCount\030\t \001(\004\022\032\n\022tot" + + "alCompactingKVs\030\n \001(\004\022\033\n\023currentCompacte" + + "dKVs\030\013 \001(\004\022\027\n\017rootIndexSizeKB\030\014 \001(\r\022\036\n\026t" + + "otalStaticIndexSizeKB\030\r \001(\r\022\036\n\026totalStat" + + "icBloomSizeKB\030\016 \001(\r\022\"\n\014coprocessors\030\017 \003(" + + "\0132\014.Coprocessor\022\032\n\022completeSequenceId\030\020 " + + "\001(\004\"\372\001\n\nServerLoad\022\030\n\020numberOfRequests\030\001" + + " \001(\r\022\035\n\025totalNumberOfRequests\030\002 \001(\r\022\022\n\nu" + + "sedHeapMB\030\003 \001(\r\022\021\n\tmaxHeapMB\030\004 \001(\r\022 \n\013re" + + "gionLoads\030\005 \003(\0132\013.RegionLoad\022\"\n\014coproces", + "sors\030\006 \003(\0132\014.Coprocessor\022\027\n\017reportStartT" + + "ime\030\007 \001(\004\022\025\n\rreportEndTime\030\010 \001(\004\022\026\n\016info" + + "ServerPort\030\t \001(\r\"%\n\tTimeRange\022\014\n\004from\030\001 " + + "\001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Filter\022\014\n\004name\030\001 \002(\t\022" + + "\030\n\020serializedFilter\030\002 \001(\014\"w\n\010KeyValue\022\013\n" + + "\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifier\030" + + "\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004\022\031\n\007keyType\030\005 \001(" + + "\0162\010.KeyType\022\r\n\005value\030\006 \001(\014\"?\n\nServerName" + + "\022\020\n\010hostName\030\001 \002(\t\022\014\n\004port\030\002 \001(\r\022\021\n\tstar" + + "tCode\030\003 \001(\004\"\033\n\013Coprocessor\022\014\n\004name\030\001 \002(\t", + "\"-\n\016NameStringPair\022\014\n\004name\030\001 \002(\t\022\r\n\005valu" + + "e\030\002 \002(\t\",\n\rNameBytesPair\022\014\n\004name\030\001 \002(\t\022\r" + + 
"\n\005value\030\002 \001(\014\"/\n\016BytesBytesPair\022\r\n\005first" + + "\030\001 \002(\014\022\016\n\006second\030\002 \002(\014*r\n\013CompareType\022\010\n" + + "\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n" + + "\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GR" + + "EATER\020\005\022\t\n\005NO_OP\020\006*_\n\007KeyType\022\013\n\007MINIMUM" + + "\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN" + + "\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*o" + + "rg.apache.hadoop.hbase.protobuf.generate", + "dB\013HBaseProtosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_TableSchema_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_TableSchema_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TableSchema_descriptor, + new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); + internal_static_TableSchema_Attribute_descriptor = + internal_static_TableSchema_descriptor.getNestedTypes().get(0); + internal_static_TableSchema_Attribute_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TableSchema_Attribute_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder.class); + internal_static_ColumnFamilySchema_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_ColumnFamilySchema_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ColumnFamilySchema_descriptor, + new java.lang.String[] { "Name", "Attributes", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); + internal_static_ColumnFamilySchema_Attribute_descriptor = + internal_static_ColumnFamilySchema_descriptor.getNestedTypes().get(0); + internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ColumnFamilySchema_Attribute_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder.class); + internal_static_RegionInfo_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_RegionInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionInfo_descriptor, + new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); + 
internal_static_RegionSpecifier_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_RegionSpecifier_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionSpecifier_descriptor, + new java.lang.String[] { "Type", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); + internal_static_RegionLoad_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_RegionLoad_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionLoad_descriptor, + new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "Coprocessors", "CompleteSequenceId", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder.class); + internal_static_ServerLoad_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_ServerLoad_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ServerLoad_descriptor, + new java.lang.String[] { "NumberOfRequests", "TotalNumberOfRequests", "UsedHeapMB", "MaxHeapMB", "RegionLoads", "Coprocessors", "ReportStartTime", "ReportEndTime", "InfoServerPort", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder.class); + internal_static_TimeRange_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_TimeRange_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TimeRange_descriptor, + new java.lang.String[] { "From", "To", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); + internal_static_Filter_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_Filter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Filter_descriptor, + new java.lang.String[] { "Name", "SerializedFilter", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder.class); + internal_static_KeyValue_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_KeyValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_KeyValue_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder.class); + internal_static_ServerName_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_ServerName_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ServerName_descriptor, + new java.lang.String[] { "HostName", "Port", "StartCode", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); + internal_static_Coprocessor_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_Coprocessor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Coprocessor_descriptor, + new java.lang.String[] { "Name", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); + internal_static_NameStringPair_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_NameStringPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NameStringPair_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); + internal_static_NameBytesPair_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_NameBytesPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NameBytesPair_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); + internal_static_BytesBytesPair_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_BytesBytesPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BytesBytesPair_descriptor, + new java.lang.String[] { "First", "Second", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java new file mode 100644 index 0000000..48243e6 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java @@ -0,0 +1,424 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: LoadBalancer.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class LoadBalancerProtos { + private LoadBalancerProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface LoadBalancerStateOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bool balancerOn = 1; + boolean hasBalancerOn(); + boolean getBalancerOn(); + } + public static final class LoadBalancerState extends + com.google.protobuf.GeneratedMessage + implements LoadBalancerStateOrBuilder { + // Use LoadBalancerState.newBuilder() to construct. 
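// Editorial sketch, not generated output: the message's only field is
// an `optional bool`, so an empty LoadBalancerState is valid and
// hasBalancerOn() distinguishes "never set" from an explicit false:
//
//   LoadBalancerProtos.LoadBalancerState on =
//       LoadBalancerProtos.LoadBalancerState.newBuilder()
//           .setBalancerOn(true).build();
//   LoadBalancerProtos.LoadBalancerState empty =
//       LoadBalancerProtos.LoadBalancerState.getDefaultInstance();
//   // on.hasBalancerOn() == true; empty.hasBalancerOn() == false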
+ private LoadBalancerState(Builder builder) { + super(builder); + } + private LoadBalancerState(boolean noInit) {} + + private static final LoadBalancerState defaultInstance; + public static LoadBalancerState getDefaultInstance() { + return defaultInstance; + } + + public LoadBalancerState getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable; + } + + private int bitField0_; + // optional bool balancerOn = 1; + public static final int BALANCERON_FIELD_NUMBER = 1; + private boolean balancerOn_; + public boolean hasBalancerOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getBalancerOn() { + return balancerOn_; + } + + private void initFields() { + balancerOn_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, balancerOn_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, balancerOn_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) obj; + + boolean result = true; + result = result && (hasBalancerOn() == other.hasBalancerOn()); + if (hasBalancerOn()) { + result = result && (getBalancerOn() + == other.getBalancerOn()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasBalancerOn()) { + hash = (37 * hash) + BALANCERON_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getBalancerOn()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); 
+ } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + balancerOn_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState build() { + org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.balancerOn_ = balancerOn_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance()) return this; + if (other.hasBalancerOn()) { + setBalancerOn(other.getBalancerOn()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + 
return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + balancerOn_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional bool balancerOn = 1; + private boolean balancerOn_ ; + public boolean hasBalancerOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getBalancerOn() { + return balancerOn_; + } + public Builder setBalancerOn(boolean value) { + bitField0_ |= 0x00000001; + balancerOn_ = value; + onChanged(); + return this; + } + public Builder clearBalancerOn() { + bitField0_ = (bitField0_ & ~0x00000001); + balancerOn_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LoadBalancerState) + } + + static { + defaultInstance = new LoadBalancerState(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LoadBalancerState) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LoadBalancerState_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LoadBalancerState_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\022LoadBalancer.proto\"\'\n\021LoadBalancerStat" + + "e\022\022\n\nbalancerOn\030\001 \001(\010BE\n*org.apache.hado" + + "op.hbase.protobuf.generatedB\022LoadBalance" + + "rProtosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_LoadBalancerState_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_LoadBalancerState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LoadBalancerState_descriptor, + new java.lang.String[] { "BalancerOn", }, + org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, + org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java new file mode 100644 index 0000000..71be12a --- /dev/null +++ 
hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java @@ -0,0 +1,16419 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: MasterAdmin.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class MasterAdminProtos { + private MasterAdminProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface AddColumnRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required .ColumnFamilySchema columnFamilies = 2; + boolean hasColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); + } + public static final class AddColumnRequest extends + com.google.protobuf.GeneratedMessage + implements AddColumnRequestOrBuilder { + // Use AddColumnRequest.newBuilder() to construct. + private AddColumnRequest(Builder builder) { + super(builder); + } + private AddColumnRequest(boolean noInit) {} + + private static final AddColumnRequest defaultInstance; + public static AddColumnRequest getDefaultInstance() { + return defaultInstance; + } + + public AddColumnRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .ColumnFamilySchema columnFamilies = 2; + public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + return columnFamilies_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + return columnFamilies_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasColumnFamilies()) { + memoizedIsInitialized = 0; + return false; + } + if (!getColumnFamilies().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void 
writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, columnFamilies_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, columnFamilies_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasColumnFamilies() == other.hasColumnFamilies()); + if (hasColumnFamilies()) { + result = result && getColumnFamilies() + .equals(other.getColumnFamilies()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasColumnFamilies()) { + hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamilies().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFamiliesFieldBuilder(); + } + } + private static Builder create() { + return new 
Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (columnFamiliesBuilder_ == null) { + result.columnFamilies_ = columnFamilies_; + } else { + result.columnFamilies_ = columnFamiliesBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasColumnFamilies()) { + mergeColumnFamilies(other.getColumnFamilies()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasColumnFamilies()) { + + return false; + } + if (!getColumnFamilies().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); + if (hasColumnFamilies()) { + subBuilder.mergeFrom(getColumnFamilies()); + } + input.readMessage(subBuilder, extensionRegistry); + setColumnFamilies(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required .ColumnFamilySchema columnFamilies = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_; + } else { + return columnFamiliesBuilder_.getMessage(); + } + } + public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + columnFamilies_ = value; + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setColumnFamilies( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = builderForValue.build(); + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if 
(columnFamiliesBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { + columnFamilies_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); + } else { + columnFamilies_ = value; + } + onChanged(); + } else { + columnFamiliesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + onChanged(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getColumnFamiliesFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + if (columnFamiliesBuilder_ != null) { + return columnFamiliesBuilder_.getMessageOrBuilder(); + } else { + return columnFamilies_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> + getColumnFamiliesFieldBuilder() { + if (columnFamiliesBuilder_ == null) { + columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( + columnFamilies_, + getParentForChildren(), + isClean()); + columnFamilies_ = null; + } + return columnFamiliesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:AddColumnRequest) + } + + static { + defaultInstance = new AddColumnRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddColumnRequest) + } + + public interface AddColumnResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class AddColumnResponse extends + com.google.protobuf.GeneratedMessage + implements AddColumnResponseOrBuilder { + // Use AddColumnResponse.newBuilder() to construct. 
+ private AddColumnResponse(Builder builder) { + super(builder); + } + private AddColumnResponse(boolean noInit) {} + + private static final AddColumnResponse defaultInstance; + public static AddColumnResponse getDefaultInstance() { + return defaultInstance; + } + + public AddColumnResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public 
Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:AddColumnResponse) + } + + static { + defaultInstance = new AddColumnResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddColumnResponse) + } + + public interface DeleteColumnRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required bytes columnName = 2; + boolean hasColumnName(); + com.google.protobuf.ByteString getColumnName(); + } + 
public static final class DeleteColumnRequest extends + com.google.protobuf.GeneratedMessage + implements DeleteColumnRequestOrBuilder { + // Use DeleteColumnRequest.newBuilder() to construct. + private DeleteColumnRequest(Builder builder) { + super(builder); + } + private DeleteColumnRequest(boolean noInit) {} + + private static final DeleteColumnRequest defaultInstance; + public static DeleteColumnRequest getDefaultInstance() { + return defaultInstance; + } + + public DeleteColumnRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required bytes columnName = 2; + public static final int COLUMNNAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString columnName_; + public boolean hasColumnName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnName() { + return columnName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + columnName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasColumnName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, columnName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, columnName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasColumnName() == other.hasColumnName()); + if (hasColumnName()) { + result = result && getColumnName() + .equals(other.getColumnName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasColumnName()) { + hash = (37 * hash) + COLUMNNAME_FIELD_NUMBER; + hash = (53 * hash) + getColumnName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + columnName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.columnName_ = columnName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasColumnName()) { + setColumnName(other.getColumnName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasColumnName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + columnName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required bytes columnName = 2; + private com.google.protobuf.ByteString columnName_ = 
com.google.protobuf.ByteString.EMPTY; + public boolean hasColumnName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnName() { + return columnName_; + } + public Builder setColumnName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + columnName_ = value; + onChanged(); + return this; + } + public Builder clearColumnName() { + bitField0_ = (bitField0_ & ~0x00000002); + columnName_ = getDefaultInstance().getColumnName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DeleteColumnRequest) + } + + static { + defaultInstance = new DeleteColumnRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteColumnRequest) + } + + public interface DeleteColumnResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class DeleteColumnResponse extends + com.google.protobuf.GeneratedMessage + implements DeleteColumnResponseOrBuilder { + // Use DeleteColumnResponse.newBuilder() to construct. + private DeleteColumnResponse(Builder builder) { + super(builder); + } + private DeleteColumnResponse(boolean noInit) {} + + private static final DeleteColumnResponse defaultInstance; + public static DeleteColumnResponse getDefaultInstance() { + return defaultInstance; + } + + public DeleteColumnResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * 
hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return 
this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:DeleteColumnResponse) + } + + static { + defaultInstance = new DeleteColumnResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteColumnResponse) + } + + public interface ModifyColumnRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required .ColumnFamilySchema columnFamilies = 2; + boolean hasColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); + } + public static final class ModifyColumnRequest extends + com.google.protobuf.GeneratedMessage + implements ModifyColumnRequestOrBuilder { + // Use ModifyColumnRequest.newBuilder() to construct. + private ModifyColumnRequest(Builder builder) { + super(builder); + } + private ModifyColumnRequest(boolean noInit) {} + + private static final ModifyColumnRequest defaultInstance; + public static ModifyColumnRequest getDefaultInstance() { + return defaultInstance; + } + + public ModifyColumnRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .ColumnFamilySchema columnFamilies = 2; + public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + return columnFamilies_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + return columnFamilies_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + columnFamilies_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasColumnFamilies()) { + memoizedIsInitialized = 0; + return false; + } + if (!getColumnFamilies().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, columnFamilies_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, columnFamilies_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasColumnFamilies() == other.hasColumnFamilies()); + if (hasColumnFamilies()) { + result = result && getColumnFamilies() + .equals(other.getColumnFamilies()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasColumnFamilies()) { + hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamilies().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFamiliesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (columnFamiliesBuilder_ == null) { + result.columnFamilies_ = columnFamilies_; + } else { + result.columnFamilies_ = columnFamiliesBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasColumnFamilies()) { + mergeColumnFamilies(other.getColumnFamilies()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasColumnFamilies()) { + + return false; + } + if (!getColumnFamilies().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); + if (hasColumnFamilies()) { + subBuilder.mergeFrom(getColumnFamilies()); + } + input.readMessage(subBuilder, extensionRegistry); + setColumnFamilies(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required .ColumnFamilySchema columnFamilies = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_; + } else { + return columnFamiliesBuilder_.getMessage(); + } + } + public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema 
value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + columnFamilies_ = value; + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setColumnFamilies( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = builderForValue.build(); + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { + columnFamilies_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); + } else { + columnFamilies_ = value; + } + onChanged(); + } else { + columnFamiliesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + onChanged(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getColumnFamiliesFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + if (columnFamiliesBuilder_ != null) { + return columnFamiliesBuilder_.getMessageOrBuilder(); + } else { + return columnFamilies_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> + getColumnFamiliesFieldBuilder() { + if (columnFamiliesBuilder_ == null) { + columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( + columnFamilies_, + getParentForChildren(), + isClean()); + columnFamilies_ = null; + } + return columnFamiliesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ModifyColumnRequest) + } + + static { + defaultInstance = new ModifyColumnRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyColumnRequest) + } + + public interface ModifyColumnResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ModifyColumnResponse extends + com.google.protobuf.GeneratedMessage + implements ModifyColumnResponseOrBuilder { + // Use ModifyColumnResponse.newBuilder() to construct. 
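+ // Illustrative note (not part of the generated output): this empty response
+ // pairs with the ModifyColumnRequest defined above. A minimal sketch of
+ // building that request, assuming a table named "t" and a pre-built
+ // ColumnFamilySchema `cfs` (both hypothetical):
+ //
+ //   ModifyColumnRequest req = ModifyColumnRequest.newBuilder()
+ //       .setTableName(com.google.protobuf.ByteString.copyFromUtf8("t"))
+ //       .setColumnFamilies(cfs)   // required; build() throws if unset
+ //       .build();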
+ private ModifyColumnResponse(Builder builder) { + super(builder); + } + private ModifyColumnResponse(boolean noInit) {} + + private static final ModifyColumnResponse defaultInstance; + public static ModifyColumnResponse getDefaultInstance() { + return defaultInstance; + } + + public ModifyColumnResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ModifyColumnResponse) + } + + static { + defaultInstance = new ModifyColumnResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyColumnResponse) + } + + public interface MoveRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier 
region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional .ServerName destServerName = 2; + boolean hasDestServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder(); + } + public static final class MoveRegionRequest extends + com.google.protobuf.GeneratedMessage + implements MoveRegionRequestOrBuilder { + // Use MoveRegionRequest.newBuilder() to construct. + private MoveRegionRequest(Builder builder) { + super(builder); + } + private MoveRegionRequest(boolean noInit) {} + + private static final MoveRegionRequest defaultInstance; + public static MoveRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public MoveRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional .ServerName destServerName = 2; + public static final int DESTSERVERNAME_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_; + public boolean hasDestServerName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { + return destServerName_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { + return destServerName_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasDestServerName()) { + if (!getDestServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + 
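+ // Field 1 (region) is written only when its presence bit is set in
+ // bitField0_; presence of optional/required fields is tracked in that bitmask.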
output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, destServerName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, destServerName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasDestServerName() == other.hasDestServerName()); + if (hasDestServerName()) { + result = result && getDestServerName() + .equals(other.getDestServerName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasDestServerName()) { + hash = (37 * hash) + DESTSERVERNAME_FIELD_NUMBER; + hash = (53 * hash) + getDestServerName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getDestServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (destServerNameBuilder_ == null) { + destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + destServerNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (destServerNameBuilder_ == null) { + result.destServerName_ = destServerName_; + } else { + result.destServerName_ = destServerNameBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasDestServerName()) { + mergeDestServerName(other.getDestServerName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (hasDestServerName()) { + if 
(!getDestServerName().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasDestServerName()) { + subBuilder.mergeFrom(getDestServerName()); + } + input.readMessage(subBuilder, extensionRegistry); + setDestServerName(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + 
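+ // The presence bit for region is set regardless of whether the value was
+ // stored directly or routed through the nested SingleFieldBuilder.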
return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional .ServerName destServerName = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_; + public boolean hasDestServerName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { + if (destServerNameBuilder_ == null) { + return destServerName_; + } else { + return destServerNameBuilder_.getMessage(); + } + } + public Builder setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (destServerNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + destServerName_ = value; + onChanged(); + } else { + destServerNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setDestServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (destServerNameBuilder_ == null) { + destServerName_ = builderForValue.build(); + onChanged(); + } else { + destServerNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (destServerNameBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + destServerName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + destServerName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destServerName_).mergeFrom(value).buildPartial(); + } else { + destServerName_ = value; + } + 
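+ // When a value is already present, the incoming message is merged field by
+ // field via buildPartial(); otherwise the new value replaces the default instance.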
onChanged(); + } else { + destServerNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearDestServerName() { + if (destServerNameBuilder_ == null) { + destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + destServerNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestServerNameBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getDestServerNameFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { + if (destServerNameBuilder_ != null) { + return destServerNameBuilder_.getMessageOrBuilder(); + } else { + return destServerName_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getDestServerNameFieldBuilder() { + if (destServerNameBuilder_ == null) { + destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + destServerName_, + getParentForChildren(), + isClean()); + destServerName_ = null; + } + return destServerNameBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MoveRegionRequest) + } + + static { + defaultInstance = new MoveRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MoveRegionRequest) + } + + public interface MoveRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class MoveRegionResponse extends + com.google.protobuf.GeneratedMessage + implements MoveRegionResponseOrBuilder { + // Use MoveRegionResponse.newBuilder() to construct. 
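+ // Illustrative note (not part of the generated output): the matching
+ // MoveRegionRequest carries a required RegionSpecifier and an optional
+ // destination ServerName. A minimal sketch, assuming pre-built messages
+ // `regionSpec` and `destServer` (both hypothetical):
+ //
+ //   MoveRegionRequest req = MoveRegionRequest.newBuilder()
+ //       .setRegion(regionSpec)           // required
+ //       .setDestServerName(destServer)   // optional; may be omitted
+ //       .build();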
+ private MoveRegionResponse(Builder builder) { + super(builder); + } + private MoveRegionResponse(boolean noInit) {} + + private static final MoveRegionResponse defaultInstance; + public static MoveRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public MoveRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
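+ // All of the static parseFrom overloads funnel through Builder.buildParsed(), which reports a missing required field as a checked InvalidProtocolBufferException rather than an unchecked UninitializedMessageException.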
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + 
} + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:MoveRegionResponse) + } + + static { + defaultInstance = new MoveRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MoveRegionResponse) + } + + public interface AssignRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + 
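+ // The OrBuilder variant lets callers inspect the nested region while a Builder is still live, without forcing an intermediate message copy.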
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + public static final class AssignRegionRequest extends + com.google.protobuf.GeneratedMessage + implements AssignRegionRequestOrBuilder { + // Use AssignRegionRequest.newBuilder() to construct. + private AssignRegionRequest(Builder builder) { + super(builder); + } + private AssignRegionRequest(boolean noInit) {} + + private static final AssignRegionRequest defaultInstance; + public static AssignRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public AssignRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == 
other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder 
newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 
0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = 
builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:AssignRegionRequest) + } + + static { + defaultInstance = new AssignRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AssignRegionRequest) + } + + public interface AssignRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class AssignRegionResponse extends + com.google.protobuf.GeneratedMessage + implements AssignRegionResponseOrBuilder { + // Use AssignRegionResponse.newBuilder() to construct. 
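+ // Like MoveRegionResponse above, AssignRegionResponse carries no fields; it simply acknowledges that the request was accepted.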
+ private AssignRegionResponse(Builder builder) { + super(builder); + } + private AssignRegionResponse(boolean noInit) {} + + private static final AssignRegionResponse defaultInstance; + public static AssignRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public AssignRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:AssignRegionResponse) + } + + static { + defaultInstance = new AssignRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AssignRegionResponse) + } + + public interface UnassignRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier 
region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bool force = 2 [default = false]; + boolean hasForce(); + boolean getForce(); + } + public static final class UnassignRegionRequest extends + com.google.protobuf.GeneratedMessage + implements UnassignRegionRequestOrBuilder { + // Use UnassignRegionRequest.newBuilder() to construct. + private UnassignRegionRequest(Builder builder) { + super(builder); + } + private UnassignRegionRequest(boolean noInit) {} + + private static final UnassignRegionRequest defaultInstance; + public static UnassignRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public UnassignRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bool force = 2 [default = false]; + public static final int FORCE_FIELD_NUMBER = 2; + private boolean force_; + public boolean hasForce() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getForce() { + return force_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + force_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, force_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, force_); + } + size += getUnknownFields().getSerializedSize(); + 
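+ // Messages are immutable once built, so the computed size can be memoized; writeTo() above calls getSerializedSize() first and relies on this pre-computation.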
memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasForce() == other.hasForce()); + if (hasForce()) { + result = result && (getForce() + == other.getForce()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasForce()) { + hash = (37 * hash) + FORCE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getForce()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + 
return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + force_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.force_ = force_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasForce()) { + setForce(other.getForce()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + 
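+ // Note buildPartial() here: required-field validation is deferred to isInitialized()/buildParsed(), so a partially read nested message does not throw mid-parse.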
setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + force_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bool force = 2 [default = false]; + private boolean force_ ; + public boolean hasForce() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getForce() { + return force_; + } + public Builder setForce(boolean value) { + bitField0_ |= 0x00000002; + force_ = value; + onChanged(); + return this; + } + public Builder clearForce() { + bitField0_ = (bitField0_ & ~0x00000002); + force_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UnassignRegionRequest) + } + + static { + defaultInstance = new UnassignRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnassignRegionRequest) + } + + public interface UnassignRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class UnassignRegionResponse extends + com.google.protobuf.GeneratedMessage + implements UnassignRegionResponseOrBuilder { + // Use UnassignRegionResponse.newBuilder() to construct. + private UnassignRegionResponse(Builder builder) { + super(builder); + } + private UnassignRegionResponse(boolean noInit) {} + + private static final UnassignRegionResponse defaultInstance; + public static UnassignRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public UnassignRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int 
hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + 
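+   // Illustrative usage (a minimal sketch, assuming the protobuf 2.x builder
+   // API generated above; this comment is an annotation, not protoc output):
+   //
+   //   UnassignRegionResponse resp =
+   //       UnassignRegionResponse.newBuilder().build();   // empty response message
+   //   byte[] wire = resp.toByteArray();                  // inherited from AbstractMessageLite
+   //   UnassignRegionResponse parsed =
+   //       UnassignRegionResponse.parseFrom(wire);        // parseFrom(byte[]) defined above
+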
@java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:UnassignRegionResponse) + } + + static { + defaultInstance = new UnassignRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnassignRegionResponse) + } + + public interface OfflineRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + public static final class OfflineRegionRequest extends + com.google.protobuf.GeneratedMessage + implements OfflineRegionRequestOrBuilder { + // Use OfflineRegionRequest.newBuilder() to construct. + private OfflineRegionRequest(Builder builder) { + super(builder); + } + private OfflineRegionRequest(boolean noInit) {} + + private static final OfflineRegionRequest defaultInstance; + public static OfflineRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public OfflineRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + 
return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + 
} + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:OfflineRegionRequest) + } + + static { + defaultInstance = new OfflineRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OfflineRegionRequest) + } + + public interface OfflineRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class OfflineRegionResponse extends + com.google.protobuf.GeneratedMessage + implements OfflineRegionResponseOrBuilder { + // Use OfflineRegionResponse.newBuilder() to construct. + private OfflineRegionResponse(Builder builder) { + super(builder); + } + private OfflineRegionResponse(boolean noInit) {} + + private static final OfflineRegionResponse defaultInstance; + public static OfflineRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public OfflineRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponseOrBuilder { + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + 
com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:OfflineRegionResponse) + } + + static { + defaultInstance = new OfflineRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OfflineRegionResponse) + } + + public interface CreateTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .TableSchema tableSchema = 1; + boolean hasTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); + + // repeated bytes splitKeys = 2; + java.util.List getSplitKeysList(); + int getSplitKeysCount(); + com.google.protobuf.ByteString getSplitKeys(int index); + } + public static final class CreateTableRequest extends + com.google.protobuf.GeneratedMessage + implements CreateTableRequestOrBuilder { + // Use CreateTableRequest.newBuilder() to construct. + private CreateTableRequest(Builder builder) { + super(builder); + } + private CreateTableRequest(boolean noInit) {} + + private static final CreateTableRequest defaultInstance; + public static CreateTableRequest getDefaultInstance() { + return defaultInstance; + } + + public CreateTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .TableSchema tableSchema = 1; + public static final int TABLESCHEMA_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + return tableSchema_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + return tableSchema_; + } + + // repeated bytes splitKeys = 2; + public static final int SPLITKEYS_FIELD_NUMBER = 2; + private java.util.List splitKeys_; + public java.util.List + getSplitKeysList() { + return splitKeys_; + } + public int getSplitKeysCount() { + return splitKeys_.size(); + } + public com.google.protobuf.ByteString getSplitKeys(int index) { + return splitKeys_.get(index); + } + + private void initFields() { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + splitKeys_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableSchema()) { + 
memoizedIsInitialized = 0; + return false; + } + if (!getTableSchema().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, tableSchema_); + } + for (int i = 0; i < splitKeys_.size(); i++) { + output.writeBytes(2, splitKeys_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, tableSchema_); + } + { + int dataSize = 0; + for (int i = 0; i < splitKeys_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(splitKeys_.get(i)); + } + size += dataSize; + size += 1 * getSplitKeysList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) obj; + + boolean result = true; + result = result && (hasTableSchema() == other.hasTableSchema()); + if (hasTableSchema()) { + result = result && getTableSchema() + .equals(other.getTableSchema()); + } + result = result && getSplitKeysList() + .equals(other.getSplitKeysList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableSchema()) { + hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getTableSchema().hashCode(); + } + if (getSplitKeysCount() > 0) { + hash = (37 * hash) + SPLITKEYS_FIELD_NUMBER; + hash = (53 * hash) + getSplitKeysList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + 
super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableSchemaFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (tableSchemaBuilder_ == null) { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + } else { + tableSchemaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + splitKeys_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (tableSchemaBuilder_ == null) { + result.tableSchema_ = tableSchema_; + } else { + result.tableSchema_ = tableSchemaBuilder_.build(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.splitKeys_ = splitKeys_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance()) return this; + if (other.hasTableSchema()) { + mergeTableSchema(other.getTableSchema()); + } + if (!other.splitKeys_.isEmpty()) { + if (splitKeys_.isEmpty()) { + splitKeys_ = other.splitKeys_; + bitField0_ = 
(bitField0_ & ~0x00000002); + } else { + ensureSplitKeysIsMutable(); + splitKeys_.addAll(other.splitKeys_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableSchema()) { + + return false; + } + if (!getTableSchema().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); + if (hasTableSchema()) { + subBuilder.mergeFrom(getTableSchema()); + } + input.readMessage(subBuilder, extensionRegistry); + setTableSchema(subBuilder.buildPartial()); + break; + } + case 18: { + ensureSplitKeysIsMutable(); + splitKeys_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .TableSchema tableSchema = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + if (tableSchemaBuilder_ == null) { + return tableSchema_; + } else { + return tableSchemaBuilder_.getMessage(); + } + } + public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tableSchema_ = value; + onChanged(); + } else { + tableSchemaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setTableSchema( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + tableSchema_ = builderForValue.build(); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { + tableSchema_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); + } else { + tableSchema_ = value; + } + onChanged(); + 
} else { + tableSchemaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearTableSchema() { + if (tableSchemaBuilder_ == null) { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + onChanged(); + } else { + tableSchemaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTableSchemaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + if (tableSchemaBuilder_ != null) { + return tableSchemaBuilder_.getMessageOrBuilder(); + } else { + return tableSchema_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> + getTableSchemaFieldBuilder() { + if (tableSchemaBuilder_ == null) { + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( + tableSchema_, + getParentForChildren(), + isClean()); + tableSchema_ = null; + } + return tableSchemaBuilder_; + } + + // repeated bytes splitKeys = 2; + private java.util.List splitKeys_ = java.util.Collections.emptyList();; + private void ensureSplitKeysIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + splitKeys_ = new java.util.ArrayList(splitKeys_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getSplitKeysList() { + return java.util.Collections.unmodifiableList(splitKeys_); + } + public int getSplitKeysCount() { + return splitKeys_.size(); + } + public com.google.protobuf.ByteString getSplitKeys(int index) { + return splitKeys_.get(index); + } + public Builder setSplitKeys( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSplitKeysIsMutable(); + splitKeys_.set(index, value); + onChanged(); + return this; + } + public Builder addSplitKeys(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSplitKeysIsMutable(); + splitKeys_.add(value); + onChanged(); + return this; + } + public Builder addAllSplitKeys( + java.lang.Iterable values) { + ensureSplitKeysIsMutable(); + super.addAll(values, splitKeys_); + onChanged(); + return this; + } + public Builder clearSplitKeys() { + splitKeys_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CreateTableRequest) + } + + static { + defaultInstance = new CreateTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CreateTableRequest) + } + + public interface CreateTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class CreateTableResponse extends + com.google.protobuf.GeneratedMessage + implements CreateTableResponseOrBuilder { + // Use CreateTableResponse.newBuilder() to construct. 
+ private CreateTableResponse(Builder builder) { + super(builder); + } + private CreateTableResponse(boolean noInit) {} + + private static final CreateTableResponse defaultInstance; + public static CreateTableResponse getDefaultInstance() { + return defaultInstance; + } + + public CreateTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } 
+ } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CreateTableResponse) + } + + static { + defaultInstance = new CreateTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CreateTableResponse) + } + + public interface DeleteTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + 
public static final class DeleteTableRequest extends + com.google.protobuf.GeneratedMessage + implements DeleteTableRequestOrBuilder { + // Use DeleteTableRequest.newBuilder() to construct. + private DeleteTableRequest(Builder builder) { + super(builder); + } + private DeleteTableRequest(boolean noInit) {} + + private static final DeleteTableRequest defaultInstance; + public static DeleteTableRequest getDefaultInstance() { + return defaultInstance; + } + + public DeleteTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + 
hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DeleteTableRequest) + } + + static { + defaultInstance = new DeleteTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteTableRequest) + } + + public interface DeleteTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class DeleteTableResponse extends + com.google.protobuf.GeneratedMessage + implements DeleteTableResponseOrBuilder { + // Use DeleteTableResponse.newBuilder() to construct. 
+ private DeleteTableResponse(Builder builder) { + super(builder); + } + private DeleteTableResponse(boolean noInit) {} + + private static final DeleteTableResponse defaultInstance; + public static DeleteTableResponse getDefaultInstance() { + return defaultInstance; + } + + public DeleteTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } 
+ } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:DeleteTableResponse) + } + + static { + defaultInstance = new DeleteTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteTableResponse) + } + + public interface EnableTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + 
public static final class EnableTableRequest extends + com.google.protobuf.GeneratedMessage + implements EnableTableRequestOrBuilder { + // Use EnableTableRequest.newBuilder() to construct. + private EnableTableRequest(Builder builder) { + super(builder); + } + private EnableTableRequest(boolean noInit) {} + + private static final EnableTableRequest defaultInstance; + public static EnableTableRequest getDefaultInstance() { + return defaultInstance; + } + + public EnableTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + 
hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:EnableTableRequest) + } + + static { + defaultInstance = new EnableTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EnableTableRequest) + } + + public interface EnableTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class EnableTableResponse extends + com.google.protobuf.GeneratedMessage + implements EnableTableResponseOrBuilder { + // Use EnableTableResponse.newBuilder() to construct. 
+ private EnableTableResponse(Builder builder) { + super(builder); + } + private EnableTableResponse(boolean noInit) {} + + private static final EnableTableResponse defaultInstance; + public static EnableTableResponse getDefaultInstance() { + return defaultInstance; + } + + public EnableTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } 
+ } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:EnableTableResponse) + } + + static { + defaultInstance = new EnableTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EnableTableResponse) + } + + public interface DisableTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + 
public static final class DisableTableRequest extends + com.google.protobuf.GeneratedMessage + implements DisableTableRequestOrBuilder { + // Use DisableTableRequest.newBuilder() to construct. + private DisableTableRequest(Builder builder) { + super(builder); + } + private DisableTableRequest(boolean noInit) {} + + private static final DisableTableRequest defaultInstance; + public static DisableTableRequest getDefaultInstance() { + return defaultInstance; + } + + public DisableTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + 
getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DisableTableRequest) + } + + static { + defaultInstance = new DisableTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DisableTableRequest) + } + + public interface DisableTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class DisableTableResponse extends + com.google.protobuf.GeneratedMessage + implements DisableTableResponseOrBuilder { + // Use DisableTableResponse.newBuilder() to construct. 
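+ // Editorial note (not protoc output): DisableTableResponse declares no
+ // fields and serves as an empty acknowledgement. Unknown fields are still
+ // preserved (see the getUnknownFields() plumbing below), so a newer server
+ // can add response fields without breaking older clients. A minimal round
+ // trip, for illustration only:
+ //
+ //   byte[] wire = DisableTableResponse.getDefaultInstance().toByteArray(); // zero bytes
+ //   DisableTableResponse ack = DisableTableResponse.parseFrom(wire);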
+ private DisableTableResponse(Builder builder) { + super(builder); + } + private DisableTableResponse(boolean noInit) {} + + private static final DisableTableResponse defaultInstance; + public static DisableTableResponse getDefaultInstance() { + return defaultInstance; + } + + public DisableTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:DisableTableResponse) + } + + static { + defaultInstance = new DisableTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DisableTableResponse) + } + + public interface ModifyTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; 
+ boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required .TableSchema tableSchema = 2; + boolean hasTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); + } + public static final class ModifyTableRequest extends + com.google.protobuf.GeneratedMessage + implements ModifyTableRequestOrBuilder { + // Use ModifyTableRequest.newBuilder() to construct. + private ModifyTableRequest(Builder builder) { + super(builder); + } + private ModifyTableRequest(boolean noInit) {} + + private static final ModifyTableRequest defaultInstance; + public static ModifyTableRequest getDefaultInstance() { + return defaultInstance; + } + + public ModifyTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .TableSchema tableSchema = 2; + public static final int TABLESCHEMA_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + return tableSchema_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + return tableSchema_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableSchema()) { + memoizedIsInitialized = 0; + return false; + } + if (!getTableSchema().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, tableSchema_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if 
(((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, tableSchema_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasTableSchema() == other.hasTableSchema()); + if (hasTableSchema()) { + result = result && getTableSchema() + .equals(other.getTableSchema()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasTableSchema()) { + hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getTableSchema().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableSchemaFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (tableSchemaBuilder_ == null) { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + } else { + tableSchemaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (tableSchemaBuilder_ == null) { + result.tableSchema_ = tableSchema_; + } else { + result.tableSchema_ = tableSchemaBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasTableSchema()) { + mergeTableSchema(other.getTableSchema()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasTableSchema()) { + + return false; + } + if (!getTableSchema().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + 
bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); + if (hasTableSchema()) { + subBuilder.mergeFrom(getTableSchema()); + } + input.readMessage(subBuilder, extensionRegistry); + setTableSchema(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required .TableSchema tableSchema = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + if (tableSchemaBuilder_ == null) { + return tableSchema_; + } else { + return tableSchemaBuilder_.getMessage(); + } + } + public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tableSchema_ = value; + onChanged(); + } else { + tableSchemaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setTableSchema( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + tableSchema_ = builderForValue.build(); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { + tableSchema_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); + } else { + tableSchema_ = value; + } + onChanged(); + } else { + tableSchemaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearTableSchema() { + if (tableSchemaBuilder_ == null) { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + onChanged(); + } else { + tableSchemaBuilder_.clear(); + } + bitField0_ = 
(bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getTableSchemaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + if (tableSchemaBuilder_ != null) { + return tableSchemaBuilder_.getMessageOrBuilder(); + } else { + return tableSchema_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> + getTableSchemaFieldBuilder() { + if (tableSchemaBuilder_ == null) { + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( + tableSchema_, + getParentForChildren(), + isClean()); + tableSchema_ = null; + } + return tableSchemaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ModifyTableRequest) + } + + static { + defaultInstance = new ModifyTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyTableRequest) + } + + public interface ModifyTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ModifyTableResponse extends + com.google.protobuf.GeneratedMessage + implements ModifyTableResponseOrBuilder { + // Use ModifyTableResponse.newBuilder() to construct. 
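+ // Editorial note (not protoc output): ModifyTableResponse is another empty
+ // ack. Its paired request carries two required fields, so a caller must set
+ // both before build() succeeds; assuming a default TableSchema is itself
+ // initialized (i.e. it has no unset required fields of its own):
+ //
+ //   ModifyTableRequest req = ModifyTableRequest.newBuilder()
+ //       .setTableName(com.google.protobuf.ByteString.copyFromUtf8("t1"))
+ //       .setTableSchema(HBaseProtos.TableSchema.getDefaultInstance())
+ //       .build();   // omitting either required field throws here
+ //
+ // ("t1" is a made-up value; a real TableSchema would come from the
+ // HBaseProtos builders.)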
+ private ModifyTableResponse(Builder builder) { + super(builder); + } + private ModifyTableResponse(boolean noInit) {} + + private static final ModifyTableResponse defaultInstance; + public static ModifyTableResponse getDefaultInstance() { + return defaultInstance; + } + + public ModifyTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } 
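+ // Editorial note (not protoc output): alwaysUseFieldBuilders is a static
+ // test hook on GeneratedMessage; when enabled, nested-message field
+ // builders are created eagerly so the builder wiring is exercised.
+ // ModifyTableResponse has no message-typed fields, hence the empty block.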
+ } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ModifyTableResponse) + } + + static { + defaultInstance = new ModifyTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyTableResponse) + } + + public interface ShutdownRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ShutdownRequest extends + com.google.protobuf.GeneratedMessage + implements 
ShutdownRequestOrBuilder { + // Use ShutdownRequest.newBuilder() to construct. + private ShutdownRequest(Builder builder) { + super(builder); + } + private ShutdownRequest(boolean noInit) {} + + private static final ShutdownRequest defaultInstance; + public static ShutdownRequest getDefaultInstance() { + return defaultInstance; + } + + public ShutdownRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + 
return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ShutdownRequest) + } + + static { + defaultInstance = new ShutdownRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ShutdownRequest) + } + + public interface ShutdownResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ShutdownResponse extends + com.google.protobuf.GeneratedMessage + implements ShutdownResponseOrBuilder { + // Use ShutdownResponse.newBuilder() to construct. 
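// --- Editorial sketch, not generated output --------------------------------
// The request/response messages in this file that declare no fields still
// round-trip through the full serialization machinery; their only payload is
// the (normally empty) unknown-field set. A minimal demonstration, using only
// classes generated in this file; the helper name is illustrative.
private static ShutdownRequest exampleEmptyMessageRoundTrip()
    throws com.google.protobuf.InvalidProtocolBufferException {
  ShutdownRequest request = ShutdownRequest.newBuilder().build(); // nothing to set
  byte[] wire = request.toByteArray();                            // zero bytes on the wire
  return ShutdownRequest.parseFrom(wire);                         // equals(request) holds
}
// ----------------------------------------------------------------------------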
+ private ShutdownResponse(Builder builder) { + super(builder); + } + private ShutdownResponse(boolean noInit) {} + + private static final ShutdownResponse defaultInstance; + public static ShutdownResponse getDefaultInstance() { + return defaultInstance; + } + + public ShutdownResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() 
{ + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ShutdownResponse) + } + + static { + defaultInstance = new ShutdownResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ShutdownResponse) + } + + public interface StopMasterRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class StopMasterRequest extends + com.google.protobuf.GeneratedMessage + implements StopMasterRequestOrBuilder { + // Use StopMasterRequest.newBuilder() to construct. 
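// --- Editorial sketch, not generated output --------------------------------
// parseDelimitedFrom() is the streaming counterpart of parseFrom(): each
// message is prefixed with a varint length by writeDelimitedTo(), and null is
// returned once the stream is exhausted (mergeDelimitedFrom() returns false).
// Helper name is illustrative.
private static ShutdownResponse exampleDelimitedStream()
    throws java.io.IOException {
  java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
  ShutdownResponse.newBuilder().build().writeDelimitedTo(out); // length + body
  java.io.ByteArrayInputStream in =
      new java.io.ByteArrayInputStream(out.toByteArray());
  ShutdownResponse first = ShutdownResponse.parseDelimitedFrom(in);  // the message
  ShutdownResponse second = ShutdownResponse.parseDelimitedFrom(in); // EOF -> null
  return second == null ? first : second;
}
// ----------------------------------------------------------------------------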
+ private StopMasterRequest(Builder builder) { + super(builder); + } + private StopMasterRequest(boolean noInit) {} + + private static final StopMasterRequest defaultInstance; + public static StopMasterRequest getDefaultInstance() { + return defaultInstance; + } + + public StopMasterRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public 
Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:StopMasterRequest) + } + + static { + defaultInstance = new StopMasterRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopMasterRequest) + } + + public interface StopMasterResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class StopMasterResponse extends + com.google.protobuf.GeneratedMessage + implements StopMasterResponseOrBuilder { + // Use StopMasterResponse.newBuilder() to construct. 
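// --- Editorial sketch, not generated output --------------------------------
// Messages are immutable; toBuilder() copies one into a fresh Builder via
// newBuilder(prototype), and equals() compares structurally (for field-less
// messages, just the unknown-field sets). Helper name is illustrative.
private static boolean exampleBuilderCopy() {
  StopMasterRequest original = StopMasterRequest.getDefaultInstance();
  StopMasterRequest copy = original.toBuilder().build(); // mergeFrom(prototype) underneath
  return copy.equals(original);                          // true
}
// ----------------------------------------------------------------------------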
+ private StopMasterResponse(Builder builder) { + super(builder); + } + private StopMasterResponse(boolean noInit) {} + + private static final StopMasterResponse defaultInstance; + public static StopMasterResponse getDefaultInstance() { + return defaultInstance; + } + + public StopMasterResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + 
} + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:StopMasterResponse) + } + + static { + defaultInstance = new StopMasterResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopMasterResponse) + } + + public interface BalanceRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class BalanceRequest extends + com.google.protobuf.GeneratedMessage + implements BalanceRequestOrBuilder { + // Use BalanceRequest.newBuilder() to construct. 
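// --- Editorial sketch, not generated output --------------------------------
// Tags a parser does not recognize are retained in getUnknownFields() and
// written back on serialization, which is what keeps these RPC messages
// forward compatible. Parsing a BalanceResponse (bool field 1 set) through
// the field-less ShutdownRequest schema should reproduce the bytes exactly.
// Helper name is illustrative.
private static boolean exampleUnknownFieldsPreserved()
    throws com.google.protobuf.InvalidProtocolBufferException {
  byte[] wire =
      BalanceResponse.newBuilder().setBalancerRan(true).build().toByteArray();
  ShutdownRequest reparsed = ShutdownRequest.parseFrom(wire); // field 1 -> unknowns
  return java.util.Arrays.equals(wire, reparsed.toByteArray());
}
// ----------------------------------------------------------------------------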
+ private BalanceRequest(Builder builder) { + super(builder); + } + private BalanceRequest(boolean noInit) {} + + private static final BalanceRequest defaultInstance; + public static BalanceRequest getDefaultInstance() { + return defaultInstance; + } + + public BalanceRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) 
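// buildParsed() (private, in the Builder below) differs from build(): a
// missing required field surfaces as a checked InvalidProtocolBufferException
// instead of an unchecked UninitializedMessageException.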
+ .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + 
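// Deep copy: snapshot the current state with buildPartial() (no
// required-field check), then merge it into a brand-new Builder.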
return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:BalanceRequest) + } + + static { + defaultInstance = new BalanceRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BalanceRequest) + } + + public interface BalanceResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool balancerRan = 1; + boolean hasBalancerRan(); + boolean getBalancerRan(); + } + public static final class BalanceResponse extends + com.google.protobuf.GeneratedMessage + implements BalanceResponseOrBuilder { + // Use BalanceResponse.newBuilder() to construct. 
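// --- Editorial sketch, not generated output --------------------------------
// balancerRan below is a *required* field, so build() rejects an incomplete
// message while buildPartial() does not. Helper name is illustrative.
private static BalanceResponse exampleRequiredField() {
  BalanceResponse.Builder builder = BalanceResponse.newBuilder();
  // Here builder.isInitialized() is false; builder.build() would throw
  // com.google.protobuf.UninitializedMessageException, while
  // builder.buildPartial() would hand back the incomplete message.
  return builder.setBalancerRan(true).build(); // initialized once the bool is set
}
// ----------------------------------------------------------------------------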
+ private BalanceResponse(Builder builder) { + super(builder); + } + private BalanceResponse(boolean noInit) {} + + private static final BalanceResponse defaultInstance; + public static BalanceResponse getDefaultInstance() { + return defaultInstance; + } + + public BalanceResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool balancerRan = 1; + public static final int BALANCERRAN_FIELD_NUMBER = 1; + private boolean balancerRan_; + public boolean hasBalancerRan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getBalancerRan() { + return balancerRan_; + } + + private void initFields() { + balancerRan_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasBalancerRan()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, balancerRan_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, balancerRan_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) obj; + + boolean result = true; + result = result && (hasBalancerRan() == other.hasBalancerRan()); + if (hasBalancerRan()) { + result = result && (getBalancerRan() + == other.getBalancerRan()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasBalancerRan()) { + hash = (37 * hash) + BALANCERRAN_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getBalancerRan()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponseOrBuilder 
{ + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + balancerRan_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.balancerRan_ = balancerRan_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()) return this; + if (other.hasBalancerRan()) { + setBalancerRan(other.getBalancerRan()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean 
isInitialized() { + if (!hasBalancerRan()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + balancerRan_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool balancerRan = 1; + private boolean balancerRan_ ; + public boolean hasBalancerRan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getBalancerRan() { + return balancerRan_; + } + public Builder setBalancerRan(boolean value) { + bitField0_ |= 0x00000001; + balancerRan_ = value; + onChanged(); + return this; + } + public Builder clearBalancerRan() { + bitField0_ = (bitField0_ & ~0x00000001); + balancerRan_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:BalanceResponse) + } + + static { + defaultInstance = new BalanceResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BalanceResponse) + } + + public interface SetBalancerRunningRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool on = 1; + boolean hasOn(); + boolean getOn(); + + // optional bool synchronous = 2; + boolean hasSynchronous(); + boolean getSynchronous(); + } + public static final class SetBalancerRunningRequest extends + com.google.protobuf.GeneratedMessage + implements SetBalancerRunningRequestOrBuilder { + // Use SetBalancerRunningRequest.newBuilder() to construct. 
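// --- Editorial sketch, not generated output --------------------------------
// bitField0_ tracks explicit presence: has*() reports whether a field was
// set, while get*() falls back to the declared default (false here) when it
// was not. Setter names follow the generated pattern seen on BalanceResponse
// above (setOn() is assumed by analogy with setBalancerRan()); helper name is
// illustrative.
private static boolean examplePresenceBits() {
  SetBalancerRunningRequest req = SetBalancerRunningRequest.newBuilder()
      .setOn(true)                  // required bool: flips bit 0x00000001
      .build();                     // optional synchronous left unset
  return req.hasOn()                // true: explicitly set
      && !req.hasSynchronous()      // false: never set...
      && !req.getSynchronous();     // ...so the default false comes back
}
// ----------------------------------------------------------------------------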
+ private SetBalancerRunningRequest(Builder builder) { + super(builder); + } + private SetBalancerRunningRequest(boolean noInit) {} + + private static final SetBalancerRunningRequest defaultInstance; + public static SetBalancerRunningRequest getDefaultInstance() { + return defaultInstance; + } + + public SetBalancerRunningRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bool on = 1; + public static final int ON_FIELD_NUMBER = 1; + private boolean on_; + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getOn() { + return on_; + } + + // optional bool synchronous = 2; + public static final int SYNCHRONOUS_FIELD_NUMBER = 2; + private boolean synchronous_; + public boolean hasSynchronous() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getSynchronous() { + return synchronous_; + } + + private void initFields() { + on_ = false; + synchronous_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasOn()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, on_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, synchronous_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, on_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, synchronous_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) obj; + + boolean result = true; + result = result && (hasOn() == other.hasOn()); + if (hasOn()) { + result = result && (getOn() + == other.getOn()); + } + result = result && (hasSynchronous() == other.hasSynchronous()); + if (hasSynchronous()) { + result = result && 
(getSynchronous() + == other.getSynchronous()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasOn()) { + hash = (37 * hash) + ON_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getOn()); + } + if (hasSynchronous()) { + hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getSynchronous()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + on_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + synchronous_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = new 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.on_ = on_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.synchronous_ = synchronous_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance()) return this; + if (other.hasOn()) { + setOn(other.getOn()); + } + if (other.hasSynchronous()) { + setSynchronous(other.getSynchronous()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasOn()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + on_ = input.readBool(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + synchronous_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool on = 1; + private boolean on_ ; + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getOn() { + return on_; + } + public Builder setOn(boolean value) { + bitField0_ |= 0x00000001; + on_ = value; + onChanged(); + return this; + } + public Builder clearOn() { + bitField0_ = (bitField0_ & ~0x00000001); + on_ = false; + onChanged(); + return this; + } + + // optional bool synchronous = 2; + private boolean synchronous_ ; + public boolean hasSynchronous() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getSynchronous() { + return synchronous_; + } + public Builder setSynchronous(boolean value) { + bitField0_ |= 0x00000002; + synchronous_ = value; + onChanged(); + return this; + } + public Builder clearSynchronous() { + bitField0_ = (bitField0_ & ~0x00000002); + synchronous_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SetBalancerRunningRequest) + } + + static { + defaultInstance = new SetBalancerRunningRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SetBalancerRunningRequest) + } + + public interface SetBalancerRunningResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bool 
prevBalanceValue = 1; + boolean hasPrevBalanceValue(); + boolean getPrevBalanceValue(); + } + public static final class SetBalancerRunningResponse extends + com.google.protobuf.GeneratedMessage + implements SetBalancerRunningResponseOrBuilder { + // Use SetBalancerRunningResponse.newBuilder() to construct. + private SetBalancerRunningResponse(Builder builder) { + super(builder); + } + private SetBalancerRunningResponse(boolean noInit) {} + + private static final SetBalancerRunningResponse defaultInstance; + public static SetBalancerRunningResponse getDefaultInstance() { + return defaultInstance; + } + + public SetBalancerRunningResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional bool prevBalanceValue = 1; + public static final int PREVBALANCEVALUE_FIELD_NUMBER = 1; + private boolean prevBalanceValue_; + public boolean hasPrevBalanceValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getPrevBalanceValue() { + return prevBalanceValue_; + } + + private void initFields() { + prevBalanceValue_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, prevBalanceValue_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, prevBalanceValue_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) obj; + + boolean result = true; + result = result && (hasPrevBalanceValue() == other.hasPrevBalanceValue()); + if (hasPrevBalanceValue()) { + result = result && (getPrevBalanceValue() + == other.getPrevBalanceValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasPrevBalanceValue()) { + hash = (37 * hash) + PREVBALANCEVALUE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getPrevBalanceValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + prevBalanceValue_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prevBalanceValue_ = prevBalanceValue_; + result.bitField0_ = to_bitField0_; + onBuilt(); + 
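+ // (Hand-added note: the from/to bit copy above transfers the builder's
+ // has-bit into the message, so hasPrevBalanceValue() on the built message
+ // reflects whether the field was explicitly set.)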
return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()) return this; + if (other.hasPrevBalanceValue()) { + setPrevBalanceValue(other.getPrevBalanceValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevBalanceValue_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional bool prevBalanceValue = 1; + private boolean prevBalanceValue_ ; + public boolean hasPrevBalanceValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getPrevBalanceValue() { + return prevBalanceValue_; + } + public Builder setPrevBalanceValue(boolean value) { + bitField0_ |= 0x00000001; + prevBalanceValue_ = value; + onChanged(); + return this; + } + public Builder clearPrevBalanceValue() { + bitField0_ = (bitField0_ & ~0x00000001); + prevBalanceValue_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SetBalancerRunningResponse) + } + + static { + defaultInstance = new SetBalancerRunningResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse) + } + + public interface CatalogScanRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class CatalogScanRequest extends + com.google.protobuf.GeneratedMessage + implements CatalogScanRequestOrBuilder { + // Use CatalogScanRequest.newBuilder() to construct. 
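+ // Illustrative usage sketch (hand-written, not protoc output): toggling the
+ // balancer with the two messages defined above. respBytes stands in for a
+ // hypothetical wire reply from the master.
+ private static boolean exampleSetBalancerRunning(byte[] respBytes)
+     throws com.google.protobuf.InvalidProtocolBufferException {
+   SetBalancerRunningRequest req = SetBalancerRunningRequest.newBuilder()
+       .setOn(true)          // required bool on = 1
+       .setSynchronous(true) // optional bool synchronous = 2
+       .build();
+   byte[] wire = req.toByteArray(); // bytes a client would send
+   assert wire.length > 0;
+   SetBalancerRunningResponse resp =
+       SetBalancerRunningResponse.parseFrom(respBytes);
+   return resp.hasPrevBalanceValue() && resp.getPrevBalanceValue();
+ }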
+ private CatalogScanRequest(Builder builder) { + super(builder); + } + private CatalogScanRequest(boolean noInit) {} + + private static final CatalogScanRequest defaultInstance; + public static CatalogScanRequest getDefaultInstance() { + return defaultInstance; + } + + public CatalogScanRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + 
} + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CatalogScanRequest) + } + + static { + defaultInstance = new CatalogScanRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CatalogScanRequest) + } + + public interface CatalogScanResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional int32 scanResult = 1; + boolean hasScanResult(); + int getScanResult(); + } + public static final class CatalogScanResponse extends + com.google.protobuf.GeneratedMessage + implements 
CatalogScanResponseOrBuilder { + // Use CatalogScanResponse.newBuilder() to construct. + private CatalogScanResponse(Builder builder) { + super(builder); + } + private CatalogScanResponse(boolean noInit) {} + + private static final CatalogScanResponse defaultInstance; + public static CatalogScanResponse getDefaultInstance() { + return defaultInstance; + } + + public CatalogScanResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional int32 scanResult = 1; + public static final int SCANRESULT_FIELD_NUMBER = 1; + private int scanResult_; + public boolean hasScanResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getScanResult() { + return scanResult_; + } + + private void initFields() { + scanResult_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, scanResult_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, scanResult_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) obj; + + boolean result = true; + result = result && (hasScanResult() == other.hasScanResult()); + if (hasScanResult()) { + result = result && (getScanResult() + == other.getScanResult()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasScanResult()) { + hash = (37 * hash) + SCANRESULT_FIELD_NUMBER; + hash = (53 * hash) + getScanResult(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + scanResult_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.scanResult_ = scanResult_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()) return this; + if 
(other.hasScanResult()) { + setScanResult(other.getScanResult()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + scanResult_ = input.readInt32(); + break; + } + } + } + } + + private int bitField0_; + + // optional int32 scanResult = 1; + private int scanResult_ ; + public boolean hasScanResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getScanResult() { + return scanResult_; + } + public Builder setScanResult(int value) { + bitField0_ |= 0x00000001; + scanResult_ = value; + onChanged(); + return this; + } + public Builder clearScanResult() { + bitField0_ = (bitField0_ & ~0x00000001); + scanResult_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CatalogScanResponse) + } + + static { + defaultInstance = new CatalogScanResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CatalogScanResponse) + } + + public interface EnableCatalogJanitorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool enable = 1; + boolean hasEnable(); + boolean getEnable(); + } + public static final class EnableCatalogJanitorRequest extends + com.google.protobuf.GeneratedMessage + implements EnableCatalogJanitorRequestOrBuilder { + // Use EnableCatalogJanitorRequest.newBuilder() to construct. 
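+ // Illustrative usage sketch (hand-written, not protoc output): the
+ // catalog-scan pair above has an empty request, so its default instance
+ // suffices; the reply carries an optional int32 scanResult.
+ private static int exampleCatalogScan(byte[] respBytes) // respBytes: hypothetical reply
+     throws com.google.protobuf.InvalidProtocolBufferException {
+   CatalogScanRequest req = CatalogScanRequest.getDefaultInstance();
+   assert req.isInitialized(); // no fields, so always initialized
+   CatalogScanResponse resp = CatalogScanResponse.parseFrom(respBytes);
+   return resp.hasScanResult() ? resp.getScanResult() : 0;
+ }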
+ private EnableCatalogJanitorRequest(Builder builder) { + super(builder); + } + private EnableCatalogJanitorRequest(boolean noInit) {} + + private static final EnableCatalogJanitorRequest defaultInstance; + public static EnableCatalogJanitorRequest getDefaultInstance() { + return defaultInstance; + } + + public EnableCatalogJanitorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bool enable = 1; + public static final int ENABLE_FIELD_NUMBER = 1; + private boolean enable_; + public boolean hasEnable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getEnable() { + return enable_; + } + + private void initFields() { + enable_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEnable()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, enable_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, enable_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) obj; + + boolean result = true; + result = result && (hasEnable() == other.hasEnable()); + if (hasEnable()) { + result = result && (getEnable() + == other.getEnable()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEnable()) { + hash = (37 * hash) + ENABLE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getEnable()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + 
com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + enable_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.enable_ = enable_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance()) return this; + if (other.hasEnable()) { + setEnable(other.getEnable()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEnable()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + enable_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool enable = 1; + private boolean enable_ ; + public boolean hasEnable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getEnable() { + return enable_; + } + public Builder setEnable(boolean value) { + bitField0_ |= 0x00000001; + enable_ = value; + onChanged(); + return this; + } + public Builder clearEnable() { + bitField0_ = (bitField0_ & ~0x00000001); + enable_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorRequest) + } + + static { + defaultInstance = new EnableCatalogJanitorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EnableCatalogJanitorRequest) + } + + public interface EnableCatalogJanitorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bool prevValue = 1; + boolean hasPrevValue(); + boolean getPrevValue(); + } + public static final class EnableCatalogJanitorResponse extends + com.google.protobuf.GeneratedMessage + implements EnableCatalogJanitorResponseOrBuilder { + // Use EnableCatalogJanitorResponse.newBuilder() to construct. 
+ private EnableCatalogJanitorResponse(Builder builder) { + super(builder); + } + private EnableCatalogJanitorResponse(boolean noInit) {} + + private static final EnableCatalogJanitorResponse defaultInstance; + public static EnableCatalogJanitorResponse getDefaultInstance() { + return defaultInstance; + } + + public EnableCatalogJanitorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional bool prevValue = 1; + public static final int PREVVALUE_FIELD_NUMBER = 1; + private boolean prevValue_; + public boolean hasPrevValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getPrevValue() { + return prevValue_; + } + + private void initFields() { + prevValue_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, prevValue_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, prevValue_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) obj; + + boolean result = true; + result = result && (hasPrevValue() == other.hasPrevValue()); + if (hasPrevValue()) { + result = result && (getPrevValue() + == other.getPrevValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPrevValue()) { + hash = (37 * hash) + PREVVALUE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getPrevValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + 
com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + prevValue_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prevValue_ = prevValue_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()) return this; + if (other.hasPrevValue()) { + setPrevValue(other.getPrevValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevValue_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional bool prevValue = 1; + private boolean prevValue_ ; + public boolean hasPrevValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getPrevValue() { + return prevValue_; + } + public Builder setPrevValue(boolean value) { + bitField0_ |= 0x00000001; + prevValue_ = value; + onChanged(); + return this; + } + public Builder clearPrevValue() { + bitField0_ = (bitField0_ & ~0x00000001); + prevValue_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorResponse) + } + + static { + defaultInstance = new EnableCatalogJanitorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EnableCatalogJanitorResponse) + } + + public interface IsCatalogJanitorEnabledRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class IsCatalogJanitorEnabledRequest extends + com.google.protobuf.GeneratedMessage + implements IsCatalogJanitorEnabledRequestOrBuilder { + // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct. 
+ private IsCatalogJanitorEnabledRequest(Builder builder) { + super(builder); + } + private IsCatalogJanitorEnabledRequest(boolean noInit) {} + + private static final IsCatalogJanitorEnabledRequest defaultInstance; + public static IsCatalogJanitorEnabledRequest getDefaultInstance() { + return defaultInstance; + } + + public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + 
this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledRequest) + } + + static { + defaultInstance = new IsCatalogJanitorEnabledRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledRequest) + } + + public interface IsCatalogJanitorEnabledResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool value = 1; + boolean hasValue(); + boolean getValue(); + } + public static final class IsCatalogJanitorEnabledResponse extends + com.google.protobuf.GeneratedMessage + implements IsCatalogJanitorEnabledResponseOrBuilder { + // Use IsCatalogJanitorEnabledResponse.newBuilder() to construct. + private IsCatalogJanitorEnabledResponse(Builder builder) { + super(builder); + } + private IsCatalogJanitorEnabledResponse(boolean noInit) {} + + private static final IsCatalogJanitorEnabledResponse defaultInstance; + public static IsCatalogJanitorEnabledResponse getDefaultInstance() { + return defaultInstance; + } + + public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private boolean value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getValue() { + return value_; + } + + private void initFields() { + value_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && (getValue() + == other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + value_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + value_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool value = 1; + private boolean value_ ; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getValue() { + return value_; + } + public Builder setValue(boolean value) { + bitField0_ |= 0x00000001; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledResponse) + } + + static { + defaultInstance = new IsCatalogJanitorEnabledResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse) + } + + public static abstract class MasterAdminService + implements com.google.protobuf.Service { + protected MasterAdminService() {} + + public interface Interface { + public abstract void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void 
modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void moveRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void assignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unassignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void shutdown( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void stopMaster( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void balance( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void setBalancerRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void runCatalogScan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void enableCatalogJanitor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void isCatalogJanitorEnabled( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execMasterService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new MasterAdminService() { + @java.lang.Override + public void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done) { + impl.addColumn(controller, request, done); + } + + @java.lang.Override + public void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done) { + impl.deleteColumn(controller, request, done); + } + + @java.lang.Override + public void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done) { + impl.modifyColumn(controller, request, done); + } + + @java.lang.Override + public void moveRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.moveRegion(controller, request, done); + } + + @java.lang.Override + public void assignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.assignRegion(controller, request, done); + } + + @java.lang.Override + public void unassignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.unassignRegion(controller, request, done); + } + + @java.lang.Override + public void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.offlineRegion(controller, request, done); + } + + @java.lang.Override + public void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.deleteTable(controller, request, done); + } + + @java.lang.Override + public void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.enableTable(controller, request, done); + } + + @java.lang.Override + public void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.disableTable(controller, request, done); + } + + @java.lang.Override + public void modifyTable( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.modifyTable(controller, request, done); + } + + @java.lang.Override + public void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.createTable(controller, request, done); + } + + @java.lang.Override + public void shutdown( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, + com.google.protobuf.RpcCallback done) { + impl.shutdown(controller, request, done); + } + + @java.lang.Override + public void stopMaster( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, + com.google.protobuf.RpcCallback done) { + impl.stopMaster(controller, request, done); + } + + @java.lang.Override + public void balance( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, + com.google.protobuf.RpcCallback done) { + impl.balance(controller, request, done); + } + + @java.lang.Override + public void setBalancerRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, + com.google.protobuf.RpcCallback done) { + impl.setBalancerRunning(controller, request, done); + } + + @java.lang.Override + public void runCatalogScan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, + com.google.protobuf.RpcCallback done) { + impl.runCatalogScan(controller, request, done); + } + + @java.lang.Override + public void enableCatalogJanitor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, + com.google.protobuf.RpcCallback done) { + impl.enableCatalogJanitor(controller, request, done); + } + + @java.lang.Override + public void isCatalogJanitorEnabled( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, + com.google.protobuf.RpcCallback done) { + impl.isCatalogJanitorEnabled(controller, request, done); + } + + @java.lang.Override + public void execMasterService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done) { + impl.execMasterService(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service 
type."); + } + switch(method.getIndex()) { + case 0: + return impl.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)request); + case 1: + return impl.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)request); + case 2: + return impl.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)request); + case 3: + return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)request); + case 4: + return impl.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)request); + case 5: + return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)request); + case 6: + return impl.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)request); + case 7: + return impl.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)request); + case 8: + return impl.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)request); + case 9: + return impl.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)request); + case 10: + return impl.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)request); + case 11: + return impl.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)request); + case 12: + return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)request); + case 13: + return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)request); + case 14: + return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)request); + case 15: + return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request); + case 16: + return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request); + case 17: + return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request); + case 18: + return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)request); + case 19: + return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); + case 1: + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); + case 16: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); + case 19: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); + case 6: + return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); + case 16: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); + case 19: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void moveRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void assignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unassignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, + 
com.google.protobuf.RpcCallback done); + + public abstract void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void shutdown( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void stopMaster( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void balance( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void setBalancerRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void runCatalogScan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void enableCatalogJanitor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void isCatalogJanitorEnabled( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execMasterService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.addColumn(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 8: + this.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 9: + this.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 10: + this.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 11: + this.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 12: + this.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 13: + this.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 14: + this.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 15: + this.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 16: + this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 17: + this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request, + 
com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 18: + this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 19: + this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); + case 16: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); + case 19: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + 
com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); + case 16: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); + case 19: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MasterAdminService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + 
getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance())); + } + + public void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance())); + } + + public void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance())); + } + + public void moveRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance())); + } + + public void assignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance())); + } + + public void unassignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance())); + } + + public void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance())); + } + + public void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance())); + } + + public void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance())); + } + + public void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance())); + } + + public void modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance())); + } + + public void createTable( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance())); + } + + public void shutdown( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(12), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance())); + } + + public void stopMaster( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(13), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance())); + } + + public void balance( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(14), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance())); + } + + public void setBalancerRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(15), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance())); + } + + public void runCatalogScan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(16), + 
controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance())); + } + + public void enableCatalogJanitor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(17), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance())); + } + + public void isCatalogJanitorEnabled( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(18), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance())); + } + + public void execMasterService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(19), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) + throws com.google.protobuf.ServiceException; + + 
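+
+ // Illustrative usage sketch, not part of protoc's output. The "channel"
+ // and "controller" names below stand in for whatever the surrounding RPC
+ // client code supplies; the stub wiring itself (newBlockingStub and the
+ // balance call) comes from this generated class.
+ //
+ //   MasterAdminService.BlockingInterface admin =
+ //       MasterAdminService.newBlockingStub(channel);
+ //   BalanceResponse resp =
+ //       admin.balance(controller, BalanceRequest.getDefaultInstance());
+ //   boolean ran = resp.getBalancerRan();  // required bool balancerRan = 1
+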
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) + throws com.google.protobuf.ServiceException; + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()); + } + + 
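+
+ // Reader's note (not from the generated source): each stub method in this
+ // class resolves its MethodDescriptor positionally via
+ // getDescriptor().getMethods().get(i), so the index must track the rpc
+ // declaration order in MasterAdmin.proto; the response default instance
+ // passed as the final argument tells the channel which prototype to parse
+ // the reply into before the cast.
+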
+ public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()); + } + + + 
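+
+ // Illustrative server-side sketch (assumed wiring, not generated code):
+ // an implementation of BlockingInterface, typically the master process
+ // itself, is wrapped by newReflectiveBlockingService, defined earlier in
+ // this class, which routes callBlockingMethod to the typed method whose
+ // descriptor index matches. "impl" and "controller" are placeholders.
+ //
+ //   com.google.protobuf.BlockingService svc =
+ //       MasterAdminService.newReflectiveBlockingService(impl);
+ //   com.google.protobuf.Message reply = svc.callBlockingMethod(
+ //       svc.getDescriptorForType().getMethods().get(14),  // 14 == balance
+ //       controller, BalanceRequest.getDefaultInstance());
+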
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(12), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(13), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(14), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()); + } + + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(15), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(16), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(17), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(18), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(19), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddColumnRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddColumnRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddColumnResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_AddColumnResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteColumnRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteColumnRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteColumnResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteColumnResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyColumnRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyColumnRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyColumnResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyColumnResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MoveRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MoveRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MoveRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MoveRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AssignRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AssignRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AssignRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AssignRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnassignRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnassignRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnassignRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnassignRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OfflineRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OfflineRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OfflineRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OfflineRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CreateTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CreateTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CreateTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CreateTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_DeleteTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EnableTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EnableTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EnableTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EnableTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DisableTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DisableTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DisableTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DisableTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ShutdownRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ShutdownRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ShutdownResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ShutdownResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopMasterRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopMasterRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopMasterResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopMasterResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BalanceRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BalanceRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BalanceResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BalanceResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SetBalancerRunningRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SetBalancerRunningRequest_fieldAccessorTable; + 
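+ // Orientation note (not from the generated source): every message type in
+ // MasterAdmin.proto gets one such Descriptor/FieldAccessorTable pair of
+ // statics. The Descriptor identifies the message within the file
+ // descriptor embedded below, and the FieldAccessorTable is what
+ // GeneratedMessage uses for reflective field access; both are populated
+ // by the static initializer that follows.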
private static com.google.protobuf.Descriptors.Descriptor + internal_static_SetBalancerRunningResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SetBalancerRunningResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CatalogScanRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CatalogScanRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CatalogScanResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CatalogScanResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EnableCatalogJanitorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EnableCatalogJanitorResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_IsCatalogJanitorEnabledRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_IsCatalogJanitorEnabledResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\021MasterAdmin.proto\032\013hbase.proto\032\014Client" + + ".proto\"R\n\020AddColumnRequest\022\021\n\ttableName\030" + + "\001 \002(\014\022+\n\016columnFamilies\030\002 \002(\0132\023.ColumnFa" + + "milySchema\"\023\n\021AddColumnResponse\"<\n\023Delet" + + "eColumnRequest\022\021\n\ttableName\030\001 \002(\014\022\022\n\ncol" + + "umnName\030\002 \002(\014\"\026\n\024DeleteColumnResponse\"U\n" + + "\023ModifyColumnRequest\022\021\n\ttableName\030\001 \002(\014\022" + + "+\n\016columnFamilies\030\002 \002(\0132\023.ColumnFamilySc" + + "hema\"\026\n\024ModifyColumnResponse\"Z\n\021MoveRegi" + + "onRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif", + "ier\022#\n\016destServerName\030\002 \001(\0132\013.ServerName" + + "\"\024\n\022MoveRegionResponse\"7\n\023AssignRegionRe" + + "quest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\"" + + "\026\n\024AssignRegionResponse\"O\n\025UnassignRegio" + + "nRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + + "er\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026UnassignRegi" + + "onResponse\"8\n\024OfflineRegionRequest\022 \n\006re" + + "gion\030\001 \002(\0132\020.RegionSpecifier\"\027\n\025OfflineR" + + "egionResponse\"J\n\022CreateTableRequest\022!\n\013t" + + "ableSchema\030\001 \002(\0132\014.TableSchema\022\021\n\tsplitK", + "eys\030\002 \003(\014\"\025\n\023CreateTableResponse\"\'\n\022Dele" + + "teTableRequest\022\021\n\ttableName\030\001 \002(\014\"\025\n\023Del" + + 
"eteTableResponse\"\'\n\022EnableTableRequest\022\021" + + "\n\ttableName\030\001 \002(\014\"\025\n\023EnableTableResponse" + + "\"(\n\023DisableTableRequest\022\021\n\ttableName\030\001 \002" + + "(\014\"\026\n\024DisableTableResponse\"J\n\022ModifyTabl" + + "eRequest\022\021\n\ttableName\030\001 \002(\014\022!\n\013tableSche" + + "ma\030\002 \002(\0132\014.TableSchema\"\025\n\023ModifyTableRes" + + "ponse\"\021\n\017ShutdownRequest\"\022\n\020ShutdownResp" + + "onse\"\023\n\021StopMasterRequest\"\024\n\022StopMasterR", + "esponse\"\020\n\016BalanceRequest\"&\n\017BalanceResp" + + "onse\022\023\n\013balancerRan\030\001 \002(\010\"<\n\031SetBalancer" + + "RunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronou" + + "s\030\002 \001(\010\"6\n\032SetBalancerRunningResponse\022\030\n" + + "\020prevBalanceValue\030\001 \001(\010\"\024\n\022CatalogScanRe" + + "quest\")\n\023CatalogScanResponse\022\022\n\nscanResu" + + "lt\030\001 \001(\005\"-\n\033EnableCatalogJanitorRequest\022" + + "\016\n\006enable\030\001 \002(\010\"1\n\034EnableCatalogJanitorR" + + "esponse\022\021\n\tprevValue\030\001 \001(\010\" \n\036IsCatalogJ" + + "anitorEnabledRequest\"0\n\037IsCatalogJanitor", + "EnabledResponse\022\r\n\005value\030\001 \002(\0102\201\n\n\022Maste" + + "rAdminService\0222\n\taddColumn\022\021.AddColumnRe" + + "quest\032\022.AddColumnResponse\022;\n\014deleteColum" + + "n\022\024.DeleteColumnRequest\032\025.DeleteColumnRe" + + "sponse\022;\n\014modifyColumn\022\024.ModifyColumnReq" + + "uest\032\025.ModifyColumnResponse\0225\n\nmoveRegio" + + "n\022\022.MoveRegionRequest\032\023.MoveRegionRespon" + + "se\022;\n\014assignRegion\022\024.AssignRegionRequest" + + "\032\025.AssignRegionResponse\022A\n\016unassignRegio" + + "n\022\026.UnassignRegionRequest\032\027.UnassignRegi", + "onResponse\022>\n\rofflineRegion\022\025.OfflineReg" + + "ionRequest\032\026.OfflineRegionResponse\0228\n\013de" + + "leteTable\022\023.DeleteTableRequest\032\024.DeleteT" + + "ableResponse\0228\n\013enableTable\022\023.EnableTabl" + + "eRequest\032\024.EnableTableResponse\022;\n\014disabl" + + "eTable\022\024.DisableTableRequest\032\025.DisableTa" + + "bleResponse\0228\n\013modifyTable\022\023.ModifyTable" + + "Request\032\024.ModifyTableResponse\0228\n\013createT" + + "able\022\023.CreateTableRequest\032\024.CreateTableR" + + "esponse\022/\n\010shutdown\022\020.ShutdownRequest\032\021.", + "ShutdownResponse\0225\n\nstopMaster\022\022.StopMas" + + "terRequest\032\023.StopMasterResponse\022,\n\007balan" + + "ce\022\017.BalanceRequest\032\020.BalanceResponse\022M\n" + + "\022setBalancerRunning\022\032.SetBalancerRunning" + + "Request\032\033.SetBalancerRunningResponse\022;\n\016" + + "runCatalogScan\022\023.CatalogScanRequest\032\024.Ca" + + "talogScanResponse\022S\n\024enableCatalogJanito" + + "r\022\034.EnableCatalogJanitorRequest\032\035.Enable" + + "CatalogJanitorResponse\022\\\n\027isCatalogJanit" + + "orEnabled\022\037.IsCatalogJanitorEnabledReque", + "st\032 .IsCatalogJanitorEnabledResponse\022L\n\021" + + "execMasterService\022\032.CoprocessorServiceRe" + + "quest\032\033.CoprocessorServiceResponseBG\n*or" + + "g.apache.hadoop.hbase.protobuf.generated" + + "B\021MasterAdminProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + 
descriptor = root; + internal_static_AddColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_AddColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnFamilies", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.Builder.class); + internal_static_AddColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_AddColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddColumnResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.Builder.class); + internal_static_DeleteColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_DeleteColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.Builder.class); + internal_static_DeleteColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_DeleteColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteColumnResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.Builder.class); + internal_static_ModifyColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_ModifyColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnFamilies", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.Builder.class); + internal_static_ModifyColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_ModifyColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyColumnResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.Builder.class); + internal_static_MoveRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_MoveRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MoveRegionRequest_descriptor, + new java.lang.String[] { "Region", "DestServerName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.Builder.class); + 
internal_static_MoveRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_MoveRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MoveRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.Builder.class); + internal_static_AssignRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_AssignRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AssignRegionRequest_descriptor, + new java.lang.String[] { "Region", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.Builder.class); + internal_static_AssignRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_AssignRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AssignRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.Builder.class); + internal_static_UnassignRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_UnassignRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnassignRegionRequest_descriptor, + new java.lang.String[] { "Region", "Force", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.Builder.class); + internal_static_UnassignRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_UnassignRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnassignRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.Builder.class); + internal_static_OfflineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_OfflineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OfflineRegionRequest_descriptor, + new java.lang.String[] { "Region", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.Builder.class); + internal_static_OfflineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_OfflineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OfflineRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.Builder.class); + internal_static_CreateTableRequest_descriptor = + 
getDescriptor().getMessageTypes().get(14); + internal_static_CreateTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CreateTableRequest_descriptor, + new java.lang.String[] { "TableSchema", "SplitKeys", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.Builder.class); + internal_static_CreateTableResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_CreateTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CreateTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.Builder.class); + internal_static_DeleteTableRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_DeleteTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteTableRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.Builder.class); + internal_static_DeleteTableResponse_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_DeleteTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.Builder.class); + internal_static_EnableTableRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_EnableTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_EnableTableRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.Builder.class); + internal_static_EnableTableResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_EnableTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_EnableTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.Builder.class); + internal_static_DisableTableRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_DisableTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DisableTableRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.Builder.class); + internal_static_DisableTableResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + 
internal_static_DisableTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DisableTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.Builder.class); + internal_static_ModifyTableRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_ModifyTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyTableRequest_descriptor, + new java.lang.String[] { "TableName", "TableSchema", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.Builder.class); + internal_static_ModifyTableResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_ModifyTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.Builder.class); + internal_static_ShutdownRequest_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_ShutdownRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ShutdownRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.Builder.class); + internal_static_ShutdownResponse_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_ShutdownResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ShutdownResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.Builder.class); + internal_static_StopMasterRequest_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_StopMasterRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopMasterRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.Builder.class); + internal_static_StopMasterResponse_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_StopMasterResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopMasterResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.Builder.class); + internal_static_BalanceRequest_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_BalanceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BalanceRequest_descriptor, + new 
java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.Builder.class); + internal_static_BalanceResponse_descriptor = + getDescriptor().getMessageTypes().get(29); + internal_static_BalanceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BalanceResponse_descriptor, + new java.lang.String[] { "BalancerRan", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.Builder.class); + internal_static_SetBalancerRunningRequest_descriptor = + getDescriptor().getMessageTypes().get(30); + internal_static_SetBalancerRunningRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SetBalancerRunningRequest_descriptor, + new java.lang.String[] { "On", "Synchronous", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.Builder.class); + internal_static_SetBalancerRunningResponse_descriptor = + getDescriptor().getMessageTypes().get(31); + internal_static_SetBalancerRunningResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SetBalancerRunningResponse_descriptor, + new java.lang.String[] { "PrevBalanceValue", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.Builder.class); + internal_static_CatalogScanRequest_descriptor = + getDescriptor().getMessageTypes().get(32); + internal_static_CatalogScanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CatalogScanRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); + internal_static_CatalogScanResponse_descriptor = + getDescriptor().getMessageTypes().get(33); + internal_static_CatalogScanResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CatalogScanResponse_descriptor, + new java.lang.String[] { "ScanResult", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); + internal_static_EnableCatalogJanitorRequest_descriptor = + getDescriptor().getMessageTypes().get(34); + internal_static_EnableCatalogJanitorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_EnableCatalogJanitorRequest_descriptor, + new java.lang.String[] { "Enable", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.Builder.class); + internal_static_EnableCatalogJanitorResponse_descriptor = + getDescriptor().getMessageTypes().get(35); + internal_static_EnableCatalogJanitorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_EnableCatalogJanitorResponse_descriptor, + new java.lang.String[] { "PrevValue", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.Builder.class); + internal_static_IsCatalogJanitorEnabledRequest_descriptor = + getDescriptor().getMessageTypes().get(36); + internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_IsCatalogJanitorEnabledRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.Builder.class); + internal_static_IsCatalogJanitorEnabledResponse_descriptor = + getDescriptor().getMessageTypes().get(37); + internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_IsCatalogJanitorEnabledResponse_descriptor, + new java.lang.String[] { "Value", }, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java new file mode 100644 index 0000000..1f36d15 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java @@ -0,0 +1,3090 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: MasterMonitor.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class MasterMonitorProtos { + private MasterMonitorProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface GetSchemaAlterStatusRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + public static final class GetSchemaAlterStatusRequest extends + com.google.protobuf.GeneratedMessage + implements GetSchemaAlterStatusRequestOrBuilder { + // Use GetSchemaAlterStatusRequest.newBuilder() to construct. 
+ private GetSchemaAlterStatusRequest(Builder builder) { + super(builder); + } + private GetSchemaAlterStatusRequest(boolean noInit) {} + + private static final GetSchemaAlterStatusRequest defaultInstance; + public static GetSchemaAlterStatusRequest getDefaultInstance() { + return defaultInstance; + } + + public GetSchemaAlterStatusRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest) + } + + static { + defaultInstance = new GetSchemaAlterStatusRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest) + } + + public interface GetSchemaAlterStatusResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint32 yetToUpdateRegions = 1; + boolean hasYetToUpdateRegions(); + int getYetToUpdateRegions(); + + // optional uint32 totalRegions = 2; + boolean hasTotalRegions(); + int getTotalRegions(); + } + public static final class GetSchemaAlterStatusResponse extends + com.google.protobuf.GeneratedMessage + implements GetSchemaAlterStatusResponseOrBuilder { + // Use GetSchemaAlterStatusResponse.newBuilder() to construct. 
+ private GetSchemaAlterStatusResponse(Builder builder) { + super(builder); + } + private GetSchemaAlterStatusResponse(boolean noInit) {} + + private static final GetSchemaAlterStatusResponse defaultInstance; + public static GetSchemaAlterStatusResponse getDefaultInstance() { + return defaultInstance; + } + + public GetSchemaAlterStatusResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional uint32 yetToUpdateRegions = 1; + public static final int YETTOUPDATEREGIONS_FIELD_NUMBER = 1; + private int yetToUpdateRegions_; + public boolean hasYetToUpdateRegions() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getYetToUpdateRegions() { + return yetToUpdateRegions_; + } + + // optional uint32 totalRegions = 2; + public static final int TOTALREGIONS_FIELD_NUMBER = 2; + private int totalRegions_; + public boolean hasTotalRegions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTotalRegions() { + return totalRegions_; + } + + private void initFields() { + yetToUpdateRegions_ = 0; + totalRegions_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, yetToUpdateRegions_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, totalRegions_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, yetToUpdateRegions_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, totalRegions_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) obj; + + boolean result = true; + result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions()); + if (hasYetToUpdateRegions()) { + result = 
result && (getYetToUpdateRegions() + == other.getYetToUpdateRegions()); + } + result = result && (hasTotalRegions() == other.hasTotalRegions()); + if (hasTotalRegions()) { + result = result && (getTotalRegions() + == other.getTotalRegions()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasYetToUpdateRegions()) { + hash = (37 * hash) + YETTOUPDATEREGIONS_FIELD_NUMBER; + hash = (53 * hash) + getYetToUpdateRegions(); + } + if (hasTotalRegions()) { + hash = (37 * hash) + TOTALREGIONS_FIELD_NUMBER; + hash = (53 * hash) + getTotalRegions(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + yetToUpdateRegions_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + totalRegions_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.yetToUpdateRegions_ = yetToUpdateRegions_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.totalRegions_ = totalRegions_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this; + if (other.hasYetToUpdateRegions()) { + setYetToUpdateRegions(other.getYetToUpdateRegions()); + } + if (other.hasTotalRegions()) { + setTotalRegions(other.getTotalRegions()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + yetToUpdateRegions_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + totalRegions_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // optional uint32 yetToUpdateRegions = 1; + private int yetToUpdateRegions_ ; + public boolean hasYetToUpdateRegions() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getYetToUpdateRegions() { + return yetToUpdateRegions_; + } + public Builder setYetToUpdateRegions(int value) { + bitField0_ |= 0x00000001; + yetToUpdateRegions_ = value; + onChanged(); + return this; + } + public Builder clearYetToUpdateRegions() { + bitField0_ = (bitField0_ & ~0x00000001); + yetToUpdateRegions_ = 0; + onChanged(); + return this; + } + + // optional uint32 totalRegions = 2; + private int totalRegions_ ; + public boolean hasTotalRegions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTotalRegions() { + return totalRegions_; + } + public Builder setTotalRegions(int value) { + bitField0_ |= 0x00000002; + totalRegions_ = value; + onChanged(); + 
return this; + } + public Builder clearTotalRegions() { + bitField0_ = (bitField0_ & ~0x00000002); + totalRegions_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse) + } + + static { + defaultInstance = new GetSchemaAlterStatusResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse) + } + + public interface GetTableDescriptorsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string tableNames = 1; + java.util.List getTableNamesList(); + int getTableNamesCount(); + String getTableNames(int index); + } + public static final class GetTableDescriptorsRequest extends + com.google.protobuf.GeneratedMessage + implements GetTableDescriptorsRequestOrBuilder { + // Use GetTableDescriptorsRequest.newBuilder() to construct. + private GetTableDescriptorsRequest(Builder builder) { + super(builder); + } + private GetTableDescriptorsRequest(boolean noInit) {} + + private static final GetTableDescriptorsRequest defaultInstance; + public static GetTableDescriptorsRequest getDefaultInstance() { + return defaultInstance; + } + + public GetTableDescriptorsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; + } + + // repeated string tableNames = 1; + public static final int TABLENAMES_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList tableNames_; + public java.util.List + getTableNamesList() { + return tableNames_; + } + public int getTableNamesCount() { + return tableNames_.size(); + } + public String getTableNames(int index) { + return tableNames_.get(index); + } + + private void initFields() { + tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < tableNames_.size(); i++) { + output.writeBytes(1, tableNames_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < tableNames_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(tableNames_.getByteString(i)); + } + size += dataSize; + size += 1 * getTableNamesList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + 
} + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) obj; + + boolean result = true; + result = result && getTableNamesList() + .equals(other.getTableNamesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getTableNamesCount() > 0) { + hash = (37 * hash) + TABLENAMES_FIELD_NUMBER; + hash = (53 * hash) + getTableNamesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( + 
com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial(); + if (!result.isInitialized()) { + 
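+ // Note: buildParsed() is the parse-path twin of build(). Where build()
+ // surfaces an uninitialized message as an unchecked
+ // UninitializedMessageException, the static parseFrom() entry points route
+ // through buildParsed() so the same condition becomes a checked
+ // InvalidProtocolBufferException, as below.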
throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + tableNames_ = new com.google.protobuf.UnmodifiableLazyStringList( + tableNames_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.tableNames_ = tableNames_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this; + if (!other.tableNames_.isEmpty()) { + if (tableNames_.isEmpty()) { + tableNames_ = other.tableNames_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTableNamesIsMutable(); + tableNames_.addAll(other.tableNames_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureTableNamesIsMutable(); + tableNames_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated string tableNames = 1; + private com.google.protobuf.LazyStringList tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureTableNamesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + tableNames_ = new com.google.protobuf.LazyStringArrayList(tableNames_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getTableNamesList() { + return java.util.Collections.unmodifiableList(tableNames_); + } + public int getTableNamesCount() { + return tableNames_.size(); + } + public String getTableNames(int index) { + return tableNames_.get(index); + } + public Builder setTableNames( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableNamesIsMutable(); + tableNames_.set(index, value); + onChanged(); + return this; + } + public Builder addTableNames(String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableNamesIsMutable(); + tableNames_.add(value); + onChanged(); + return this; + } + 
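+ // Illustrative usage (a sketch, not protoc output; the table names are
+ // hypothetical): the repeated tableNames field is assembled one entry at a
+ // time through this builder, e.g.
+ //
+ //   GetTableDescriptorsRequest req = GetTableDescriptorsRequest.newBuilder()
+ //       .addTableNames("usertable")
+ //       .addTableNames("testtable")
+ //       .build(); // no required fields, so build() cannot fail here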
public Builder addAllTableNames( + java.lang.Iterable values) { + ensureTableNamesIsMutable(); + super.addAll(values, tableNames_); + onChanged(); + return this; + } + public Builder clearTableNames() { + tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + void addTableNames(com.google.protobuf.ByteString value) { + ensureTableNamesIsMutable(); + tableNames_.add(value); + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest) + } + + static { + defaultInstance = new GetTableDescriptorsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest) + } + + public interface GetTableDescriptorsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .TableSchema tableSchema = 1; + java.util.List + getTableSchemaList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index); + int getTableSchemaCount(); + java.util.List + getTableSchemaOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( + int index); + } + public static final class GetTableDescriptorsResponse extends + com.google.protobuf.GeneratedMessage + implements GetTableDescriptorsResponseOrBuilder { + // Use GetTableDescriptorsResponse.newBuilder() to construct. + private GetTableDescriptorsResponse(Builder builder) { + super(builder); + } + private GetTableDescriptorsResponse(boolean noInit) {} + + private static final GetTableDescriptorsResponse defaultInstance; + public static GetTableDescriptorsResponse getDefaultInstance() { + return defaultInstance; + } + + public GetTableDescriptorsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; + } + + // repeated .TableSchema tableSchema = 1; + public static final int TABLESCHEMA_FIELD_NUMBER = 1; + private java.util.List tableSchema_; + public java.util.List getTableSchemaList() { + return tableSchema_; + } + public java.util.List + getTableSchemaOrBuilderList() { + return tableSchema_; + } + public int getTableSchemaCount() { + return tableSchema_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { + return tableSchema_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( + int index) { + return tableSchema_.get(index); + } + + private void initFields() { + tableSchema_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getTableSchemaCount(); i++) { + if (!getTableSchema(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + getSerializedSize(); + for (int i = 0; i < tableSchema_.size(); i++) { + output.writeMessage(1, tableSchema_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < tableSchema_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, tableSchema_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) obj; + + boolean result = true; + result = result && getTableSchemaList() + .equals(other.getTableSchemaList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getTableSchemaCount() > 0) { + hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getTableSchemaList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableSchemaFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (tableSchemaBuilder_ == null) { + tableSchema_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + tableSchemaBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse(this); + int from_bitField0_ = bitField0_; + if (tableSchemaBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.tableSchema_ = tableSchema_; + } else { + result.tableSchema_ = tableSchemaBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this; + if (tableSchemaBuilder_ == null) { + if (!other.tableSchema_.isEmpty()) { + if (tableSchema_.isEmpty()) { + tableSchema_ = other.tableSchema_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTableSchemaIsMutable(); + tableSchema_.addAll(other.tableSchema_); + } + onChanged(); + } + } else { + if (!other.tableSchema_.isEmpty()) { + if (tableSchemaBuilder_.isEmpty()) { + tableSchemaBuilder_.dispose(); + tableSchemaBuilder_ = null; + tableSchema_ = other.tableSchema_; + bitField0_ = (bitField0_ & ~0x00000001); + tableSchemaBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getTableSchemaFieldBuilder() : null; + } else { + tableSchemaBuilder_.addAllMessages(other.tableSchema_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getTableSchemaCount(); i++) { + if (!getTableSchema(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addTableSchema(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .TableSchema tableSchema = 1; + private java.util.List tableSchema_ = + java.util.Collections.emptyList(); + private void ensureTableSchemaIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + tableSchema_ = new java.util.ArrayList(tableSchema_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + + public java.util.List getTableSchemaList() { + if (tableSchemaBuilder_ == null) { + return java.util.Collections.unmodifiableList(tableSchema_); + } else { + return tableSchemaBuilder_.getMessageList(); + } + } + public int getTableSchemaCount() { + if (tableSchemaBuilder_ == null) { + return tableSchema_.size(); + } else { + return tableSchemaBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { + if (tableSchemaBuilder_ == null) { + return tableSchema_.get(index); + } else { + return tableSchemaBuilder_.getMessage(index); + } + } + public Builder setTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableSchemaIsMutable(); + tableSchema_.set(index, value); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(index, value); + } + return this; + } + public Builder setTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.set(index, builderForValue.build()); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + 
} + ensureTableSchemaIsMutable(); + tableSchema_.add(value); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(value); + } + return this; + } + public Builder addTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableSchemaIsMutable(); + tableSchema_.add(index, value); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(index, value); + } + return this; + } + public Builder addTableSchema( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.add(builderForValue.build()); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.add(index, builderForValue.build()); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllTableSchema( + java.lang.Iterable values) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + super.addAll(values, tableSchema_); + onChanged(); + } else { + tableSchemaBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearTableSchema() { + if (tableSchemaBuilder_ == null) { + tableSchema_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + tableSchemaBuilder_.clear(); + } + return this; + } + public Builder removeTableSchema(int index) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.remove(index); + onChanged(); + } else { + tableSchemaBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder( + int index) { + return getTableSchemaFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( + int index) { + if (tableSchemaBuilder_ == null) { + return tableSchema_.get(index); } else { + return tableSchemaBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getTableSchemaOrBuilderList() { + if (tableSchemaBuilder_ != null) { + return tableSchemaBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(tableSchema_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() { + return getTableSchemaFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder( + int index) { + return getTableSchemaFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); + } + public java.util.List + getTableSchemaBuilderList() { + return getTableSchemaFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> + getTableSchemaFieldBuilder() { + if (tableSchemaBuilder_ == null) { + tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( + tableSchema_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + tableSchema_ = null; + } + return tableSchemaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetTableDescriptorsResponse) + } + + static { + defaultInstance = new GetTableDescriptorsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetTableDescriptorsResponse) + } + + public interface GetClusterStatusRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GetClusterStatusRequest extends + com.google.protobuf.GeneratedMessage + implements GetClusterStatusRequestOrBuilder { + // Use GetClusterStatusRequest.newBuilder() to construct. + private GetClusterStatusRequest(Builder builder) { + super(builder); + } + private GetClusterStatusRequest(boolean noInit) {} + + private static final GetClusterStatusRequest defaultInstance; + public static GetClusterStatusRequest getDefaultInstance() { + return defaultInstance; + } + + public GetClusterStatusRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash 
= 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { 
return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetClusterStatusRequest) + } + + static { + defaultInstance = new GetClusterStatusRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetClusterStatusRequest) + } + + public interface GetClusterStatusResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ClusterStatus clusterStatus = 1; + boolean hasClusterStatus(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus(); + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder(); + } + public static final class GetClusterStatusResponse extends + com.google.protobuf.GeneratedMessage + implements GetClusterStatusResponseOrBuilder { + // Use GetClusterStatusResponse.newBuilder() to construct. 
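+ // Illustrative usage (a sketch, not protoc output; `status` stands in for a
+ // real ClusterStatusProtos.ClusterStatus instance): clusterStatus is a
+ // required field, so build() throws UninitializedMessageException unless it
+ // has been set:
+ //
+ //   GetClusterStatusResponse resp = GetClusterStatusResponse.newBuilder()
+ //       .setClusterStatus(status)
+ //       .build();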
+ private GetClusterStatusResponse(Builder builder) { + super(builder); + } + private GetClusterStatusResponse(boolean noInit) {} + + private static final GetClusterStatusResponse defaultInstance; + public static GetClusterStatusResponse getDefaultInstance() { + return defaultInstance; + } + + public GetClusterStatusResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .ClusterStatus clusterStatus = 1; + public static final int CLUSTERSTATUS_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_; + public boolean hasClusterStatus() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { + return clusterStatus_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { + return clusterStatus_; + } + + private void initFields() { + clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClusterStatus()) { + memoizedIsInitialized = 0; + return false; + } + if (!getClusterStatus().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, clusterStatus_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, clusterStatus_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) obj; + + boolean result = true; + result = result && (hasClusterStatus() == other.hasClusterStatus()); + if (hasClusterStatus()) { + result = result && getClusterStatus() + .equals(other.getClusterStatus()); 
+ } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasClusterStatus()) { + hash = (37 * hash) + CLUSTERSTATUS_FIELD_NUMBER; + hash = (53 * hash) + getClusterStatus().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() 
{ return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getClusterStatusFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (clusterStatusBuilder_ == null) { + clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + } else { + clusterStatusBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = new 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (clusterStatusBuilder_ == null) { + result.clusterStatus_ = clusterStatus_; + } else { + result.clusterStatus_ = clusterStatusBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance()) return this; + if (other.hasClusterStatus()) { + mergeClusterStatus(other.getClusterStatus()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasClusterStatus()) { + + return false; + } + if (!getClusterStatus().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(); + if (hasClusterStatus()) { + subBuilder.mergeFrom(getClusterStatus()); + } + input.readMessage(subBuilder, extensionRegistry); + setClusterStatus(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ClusterStatus clusterStatus = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> clusterStatusBuilder_; + public boolean hasClusterStatus() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { + if (clusterStatusBuilder_ == null) { + return clusterStatus_; + } else { + return clusterStatusBuilder_.getMessage(); + } + } + public Builder setClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { + if 
(clusterStatusBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + clusterStatus_ = value; + onChanged(); + } else { + clusterStatusBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setClusterStatus( + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder builderForValue) { + if (clusterStatusBuilder_ == null) { + clusterStatus_ = builderForValue.build(); + onChanged(); + } else { + clusterStatusBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { + if (clusterStatusBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + clusterStatus_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) { + clusterStatus_ = + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(clusterStatus_).mergeFrom(value).buildPartial(); + } else { + clusterStatus_ = value; + } + onChanged(); + } else { + clusterStatusBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearClusterStatus() { + if (clusterStatusBuilder_ == null) { + clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + onChanged(); + } else { + clusterStatusBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder getClusterStatusBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getClusterStatusFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { + if (clusterStatusBuilder_ != null) { + return clusterStatusBuilder_.getMessageOrBuilder(); + } else { + return clusterStatus_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> + getClusterStatusFieldBuilder() { + if (clusterStatusBuilder_ == null) { + clusterStatusBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder>( + clusterStatus_, + getParentForChildren(), + isClean()); + clusterStatus_ = null; + } + return clusterStatusBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetClusterStatusResponse) + } + + static { + defaultInstance = new GetClusterStatusResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetClusterStatusResponse) + } + + public static abstract class MasterMonitorService + implements com.google.protobuf.Service { + protected MasterMonitorService() {} + + public interface Interface { + public abstract void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done); + + 
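+      // Usage sketch: invoking this service asynchronously through the
+      // generated Stub. The RpcChannel `channel` and RpcController
+      // `controller` below are assumed to be supplied by the caller; they
+      // are not defined in this file.
+      //
+      //   MasterMonitorService.Stub stub = MasterMonitorService.newStub(channel);
+      //   stub.getClusterStatus(controller,
+      //       GetClusterStatusRequest.getDefaultInstance(),
+      //       new com.google.protobuf.RpcCallback<GetClusterStatusResponse>() {
+      //         public void run(GetClusterStatusResponse response) {
+      //           // response.getClusterStatus() carries the ClusterStatus message
+      //         }
+      //       });
+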
public abstract void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getClusterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new MasterMonitorService() { + @java.lang.Override + public void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done) { + impl.getSchemaAlterStatus(controller, request, done); + } + + @java.lang.Override + public void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done) { + impl.getTableDescriptors(controller, request, done); + } + + @java.lang.Override + public void getClusterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, + com.google.protobuf.RpcCallback done) { + impl.getClusterStatus(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)request); + case 1: + return impl.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)request); + case 2: + return impl.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); + case 2: + return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getClusterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong 
service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.MasterMonitorService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance())); + } + + public void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance())); + } + + public void getClusterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getClusterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getClusterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetSchemaAlterStatusRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetSchemaAlterStatusResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetTableDescriptorsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetTableDescriptorsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetTableDescriptorsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetTableDescriptorsResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetClusterStatusRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetClusterStatusRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetClusterStatusResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetClusterStatusResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\023MasterMonitor.proto\032\013hbase.proto\032\023Clus" + + "terStatus.proto\"0\n\033GetSchemaAlterStatusR" + + "equest\022\021\n\ttableName\030\001 \002(\014\"P\n\034GetSchemaAl" + + "terStatusResponse\022\032\n\022yetToUpdateRegions\030" + + "\001 \001(\r\022\024\n\014totalRegions\030\002 \001(\r\"0\n\032GetTableD" + + "escriptorsRequest\022\022\n\ntableNames\030\001 \003(\t\"@\n" + + "\033GetTableDescriptorsResponse\022!\n\013tableSch" + + "ema\030\001 \003(\0132\014.TableSchema\"\031\n\027GetClusterSta" + + "tusRequest\"A\n\030GetClusterStatusResponse\022%" + + "\n\rclusterStatus\030\001 \002(\0132\016.ClusterStatus2\206\002", + "\n\024MasterMonitorService\022S\n\024getSchemaAlter" + + "Status\022\034.GetSchemaAlterStatusRequest\032\035.G" + + "etSchemaAlterStatusResponse\022P\n\023getTableD" + + "escriptors\022\033.GetTableDescriptorsRequest\032" + + "\034.GetTableDescriptorsResponse\022G\n\020getClus" + + "terStatus\022\030.GetClusterStatusRequest\032\031.Ge" + + "tClusterStatusResponseBI\n*org.apache.had" + + "oop.hbase.protobuf.generatedB\023MasterMoni" + + "torProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_GetSchemaAlterStatusRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetSchemaAlterStatusRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class); + internal_static_GetSchemaAlterStatusResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetSchemaAlterStatusResponse_descriptor, + new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class); + internal_static_GetTableDescriptorsRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_GetTableDescriptorsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetTableDescriptorsRequest_descriptor, + new java.lang.String[] { "TableNames", }, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class); + internal_static_GetTableDescriptorsResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GetTableDescriptorsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetTableDescriptorsResponse_descriptor, + new java.lang.String[] { "TableSchema", }, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class); + internal_static_GetClusterStatusRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GetClusterStatusRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetClusterStatusRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.Builder.class); + internal_static_GetClusterStatusResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_GetClusterStatusResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetClusterStatusResponse_descriptor, + new java.lang.String[] { "ClusterStatus", }, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git 
hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java new file mode 100644 index 0000000..b8c322f --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -0,0 +1,969 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Master.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class MasterProtos { + private MasterProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface IsMasterRunningRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class IsMasterRunningRequest extends + com.google.protobuf.GeneratedMessage + implements IsMasterRunningRequestOrBuilder { + // Use IsMasterRunningRequest.newBuilder() to construct. + private IsMasterRunningRequest(Builder builder) { + super(builder); + } + private IsMasterRunningRequest(boolean noInit) {} + + private static final IsMasterRunningRequest defaultInstance; + public static IsMasterRunningRequest getDefaultInstance() { + return defaultInstance; + } + + public IsMasterRunningRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + com.google.protobuf.ByteString 
data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
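+        // Reads tags until end of input (tag 0). Fields this message does
+        // not recognize are preserved in the UnknownFieldSet rather than
+        // dropped, so unknown data survives a parse/reserialize cycle.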
com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:IsMasterRunningRequest) + } + + static { + defaultInstance = new IsMasterRunningRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:IsMasterRunningRequest) + } + + public interface IsMasterRunningResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool isMasterRunning = 1; + boolean hasIsMasterRunning(); + boolean getIsMasterRunning(); + } + public static final class IsMasterRunningResponse extends + com.google.protobuf.GeneratedMessage + implements IsMasterRunningResponseOrBuilder { + // Use IsMasterRunningResponse.newBuilder() to construct. + private IsMasterRunningResponse(Builder builder) { + super(builder); + } + private IsMasterRunningResponse(boolean noInit) {} + + private static final IsMasterRunningResponse defaultInstance; + public static IsMasterRunningResponse getDefaultInstance() { + return defaultInstance; + } + + public IsMasterRunningResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool isMasterRunning = 1; + public static final int ISMASTERRUNNING_FIELD_NUMBER = 1; + private boolean isMasterRunning_; + public boolean hasIsMasterRunning() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getIsMasterRunning() { + return isMasterRunning_; + } + + private void initFields() { + isMasterRunning_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasIsMasterRunning()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, isMasterRunning_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, isMasterRunning_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } 
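+    // Usage sketch: building and round-tripping this message. Everything
+    // here is defined in this class except toByteString(), which is
+    // inherited from com.google.protobuf.GeneratedMessage.
+    //
+    //   IsMasterRunningResponse resp = IsMasterRunningResponse.newBuilder()
+    //       .setIsMasterRunning(true)
+    //       .build();
+    //   IsMasterRunningResponse copy =
+    //       IsMasterRunningResponse.parseFrom(resp.toByteString());
+    //   assert copy.getIsMasterRunning();
+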
+ + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) obj; + + boolean result = true; + result = result && (hasIsMasterRunning() == other.hasIsMasterRunning()); + if (hasIsMasterRunning()) { + result = result && (getIsMasterRunning() + == other.getIsMasterRunning()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasIsMasterRunning()) { + hash = (37 * hash) + ISMASTERRUNNING_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getIsMasterRunning()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + isMasterRunning_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + 
result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.isMasterRunning_ = isMasterRunning_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()) return this; + if (other.hasIsMasterRunning()) { + setIsMasterRunning(other.getIsMasterRunning()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasIsMasterRunning()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + isMasterRunning_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool isMasterRunning = 1; + private boolean isMasterRunning_ ; + public boolean hasIsMasterRunning() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getIsMasterRunning() { + return isMasterRunning_; + } + public Builder setIsMasterRunning(boolean value) { + bitField0_ |= 0x00000001; + isMasterRunning_ = value; + onChanged(); + return this; + } + public Builder clearIsMasterRunning() { + bitField0_ = (bitField0_ & ~0x00000001); + isMasterRunning_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:IsMasterRunningResponse) + } + + static { + defaultInstance = new IsMasterRunningResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:IsMasterRunningResponse) + } + + public static abstract class MasterService + implements com.google.protobuf.Service { + protected MasterService() {} + + public interface Interface { + public abstract void isMasterRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new 
MasterService() { + @java.lang.Override + public void isMasterRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, + com.google.protobuf.RpcCallback done) { + impl.isMasterRunning(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void isMasterRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.isMasterRunning(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void isMasterRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) + throws com.google.protobuf.ServiceException { + return 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_IsMasterRunningRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_IsMasterRunningRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_IsMasterRunningResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_IsMasterRunningResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014Master.proto\"\030\n\026IsMasterRunningRequest" + + "\"2\n\027IsMasterRunningResponse\022\027\n\017isMasterR" + + "unning\030\001 \002(\0102U\n\rMasterService\022D\n\017isMaste" + + "rRunning\022\027.IsMasterRunningRequest\032\030.IsMa" + + "sterRunningResponseBB\n*org.apache.hadoop" + + ".hbase.protobuf.generatedB\014MasterProtosH" + + "\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_IsMasterRunningRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_IsMasterRunningRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_IsMasterRunningRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); + internal_static_IsMasterRunningResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_IsMasterRunningResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_IsMasterRunningResponse_descriptor, + new java.lang.String[] { "IsMasterRunning", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java new file mode 100644 index 0000000..f619f9c --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java @@ -0,0 +1,1184 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
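+// Usage sketch: assembling a MultiMutateRequest from previously built
+// ClientProtos.Mutate messages. `m1` and `m2` are placeholders, and
+// addMutationRequest(...) is assumed to follow the standard generated
+// repeated-field builder pattern for this file.
+//
+//   MultiRowMutation.MultiMutateRequest req =
+//       MultiRowMutation.MultiMutateRequest.newBuilder()
+//           .addMutationRequest(m1)
+//           .addMutationRequest(m2)
+//           .build();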
+// source: MultiRowMutation.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class MultiRowMutation { + private MultiRowMutation() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface MultiMutateRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Mutate mutationRequest = 1; + java.util.List + getMutationRequestList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutationRequest(int index); + int getMutationRequestCount(); + java.util.List + getMutationRequestOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutationRequestOrBuilder( + int index); + } + public static final class MultiMutateRequest extends + com.google.protobuf.GeneratedMessage + implements MultiMutateRequestOrBuilder { + // Use MultiMutateRequest.newBuilder() to construct. + private MultiMutateRequest(Builder builder) { + super(builder); + } + private MultiMutateRequest(boolean noInit) {} + + private static final MultiMutateRequest defaultInstance; + public static MultiMutateRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiMutateRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable; + } + + // repeated .Mutate mutationRequest = 1; + public static final int MUTATIONREQUEST_FIELD_NUMBER = 1; + private java.util.List mutationRequest_; + public java.util.List getMutationRequestList() { + return mutationRequest_; + } + public java.util.List + getMutationRequestOrBuilderList() { + return mutationRequest_; + } + public int getMutationRequestCount() { + return mutationRequest_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutationRequest(int index) { + return mutationRequest_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutationRequestOrBuilder( + int index) { + return mutationRequest_.get(index); + } + + private void initFields() { + mutationRequest_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getMutationRequestCount(); i++) { + if (!getMutationRequest(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < mutationRequest_.size(); i++) { + output.writeMessage(1, mutationRequest_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < mutationRequest_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, mutationRequest_.get(i)); + } + size += 
getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) obj; + + boolean result = true; + result = result && getMutationRequestList() + .equals(other.getMutationRequestList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getMutationRequestCount() > 0) { + hash = (37 * hash) + MUTATIONREQUEST_FIELD_NUMBER; + hash = (53 * hash) + getMutationRequestList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder 
builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMutationRequestFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (mutationRequestBuilder_ == null) { + mutationRequest_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + mutationRequestBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this); + int from_bitField0_ = bitField0_; + if (mutationRequestBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.mutationRequest_ = mutationRequest_; + } else { + result.mutationRequest_ = mutationRequestBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance()) return this; + if (mutationRequestBuilder_ == null) { + if (!other.mutationRequest_.isEmpty()) { + if (mutationRequest_.isEmpty()) { + mutationRequest_ = other.mutationRequest_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureMutationRequestIsMutable(); + mutationRequest_.addAll(other.mutationRequest_); + } + onChanged(); + } + } else { + if (!other.mutationRequest_.isEmpty()) { + if (mutationRequestBuilder_.isEmpty()) { + mutationRequestBuilder_.dispose(); + mutationRequestBuilder_ = null; + mutationRequest_ = other.mutationRequest_; + bitField0_ = (bitField0_ & ~0x00000001); + mutationRequestBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getMutationRequestFieldBuilder() : null; + } else { + mutationRequestBuilder_.addAllMessages(other.mutationRequest_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getMutationRequestCount(); i++) { + if (!getMutationRequest(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addMutationRequest(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Mutate mutationRequest = 1; + private java.util.List mutationRequest_ = + java.util.Collections.emptyList(); + private void ensureMutationRequestIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = new java.util.ArrayList(mutationRequest_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutationRequestBuilder_; + + public java.util.List getMutationRequestList() { + if (mutationRequestBuilder_ == null) { + return java.util.Collections.unmodifiableList(mutationRequest_); + } else { + return mutationRequestBuilder_.getMessageList(); + } + } + public int getMutationRequestCount() { + if (mutationRequestBuilder_ == null) { + return mutationRequest_.size(); + } else { + return mutationRequestBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutationRequest(int index) { + if (mutationRequestBuilder_ == null) { + return mutationRequest_.get(index); + } else { + return mutationRequestBuilder_.getMessage(index); + } + } + public Builder setMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutationRequestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationRequestIsMutable(); + mutationRequest_.set(index, value); + onChanged(); + } else { + mutationRequestBuilder_.setMessage(index, value); + } + return this; + } + public Builder setMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.set(index, builderForValue.build()); + onChanged(); + } else { + mutationRequestBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { 
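+      // Editor's note (not protoc output): typical client-side use of this
+      // builder method, sketched under the assumption that a
+      // ClientProtos.Mutate instance ("mutate" below) was built elsewhere:
+      //
+      //   MultiMutateRequest req = MultiMutateRequest.newBuilder()
+      //       .addMutationRequest(mutate)   // repeated field 1
+      //       .build();   // throws if any added Mutate is uninitialized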
+ if (mutationRequestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationRequestIsMutable(); + mutationRequest_.add(value); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(value); + } + return this; + } + public Builder addMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutationRequestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationRequestIsMutable(); + mutationRequest_.add(index, value); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(index, value); + } + return this; + } + public Builder addMutationRequest( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.add(builderForValue.build()); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.add(index, builderForValue.build()); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllMutationRequest( + java.lang.Iterable values) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + super.addAll(values, mutationRequest_); + onChanged(); + } else { + mutationRequestBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearMutationRequest() { + if (mutationRequestBuilder_ == null) { + mutationRequest_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + mutationRequestBuilder_.clear(); + } + return this; + } + public Builder removeMutationRequest(int index) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.remove(index); + onChanged(); + } else { + mutationRequestBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutationRequestBuilder( + int index) { + return getMutationRequestFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutationRequestOrBuilder( + int index) { + if (mutationRequestBuilder_ == null) { + return mutationRequest_.get(index); } else { + return mutationRequestBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getMutationRequestOrBuilderList() { + if (mutationRequestBuilder_ != null) { + return mutationRequestBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(mutationRequest_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder addMutationRequestBuilder() { + return getMutationRequestFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder addMutationRequestBuilder( + int index) { + return getMutationRequestFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()); + } + public java.util.List + getMutationRequestBuilderList() { + return 
getMutationRequestFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> + getMutationRequestFieldBuilder() { + if (mutationRequestBuilder_ == null) { + mutationRequestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( + mutationRequest_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + mutationRequest_ = null; + } + return mutationRequestBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiMutateRequest) + } + + static { + defaultInstance = new MultiMutateRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiMutateRequest) + } + + public interface MultiMutateResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class MultiMutateResponse extends + com.google.protobuf.GeneratedMessage + implements MultiMutateResponseOrBuilder { + // Use MultiMutateResponse.newBuilder() to construct. + private MultiMutateResponse(Builder builder) { + super(builder); + } + private MultiMutateResponse(boolean noInit) {} + + private static final MultiMutateResponse defaultInstance; + public static MultiMutateResponse getDefaultInstance() { + return defaultInstance; + } + + public MultiMutateResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) obj; + + boolean result = true; + 
result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse prototype) { + 
return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:MultiMutateResponse) + } + + static { + defaultInstance = new MultiMutateResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiMutateResponse) + } + + public static abstract class MultiRowMutationService + implements com.google.protobuf.Service { + protected MultiRowMutationService() {} + + public interface Interface { + public abstract void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new MultiRowMutationService() { + @java.lang.Override + public void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, + com.google.protobuf.RpcCallback done) { + impl.mutateRows(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + 
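+      // Editor's note (not protoc output): on the server side this reflective
+      // wrapper is what gets registered with the RPC layer. Sketch, with a
+      // hypothetical implementation class MyMultiRowMutationEndpoint:
+      //
+      //   com.google.protobuf.BlockingService service =
+      //       MultiRowMutationService.newReflectiveBlockingService(
+      //           new MyMultiRowMutationEndpoint());
+      //   // the RPC layer then dispatches via service.callBlockingMethod(...)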
getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, + 
com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiMutateRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiMutateRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiMutateResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiMutateResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\026MultiRowMutation.proto\032\014Client.proto\"6" + + "\n\022MultiMutateRequest\022 \n\017mutationRequest\030" + + "\001 \003(\0132\007.Mutate\"\025\n\023MultiMutateResponse2R\n" + + "\027MultiRowMutationService\0227\n\nmutateRows\022\023" + + ".MultiMutateRequest\032\024.MultiMutateRespons" + + "eBF\n*org.apache.hadoop.hbase.protobuf.ge" + + "neratedB\020MultiRowMutationH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_MultiMutateRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_MultiMutateRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiMutateRequest_descriptor, + new java.lang.String[] { 
"MutationRequest", }, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); + internal_static_MultiMutateResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_MultiMutateResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiMutateResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java new file mode 100644 index 0000000..302a411 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -0,0 +1,3848 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: RPC.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class RPCProtos { + private RPCProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface UserInformationOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string effectiveUser = 1; + boolean hasEffectiveUser(); + String getEffectiveUser(); + + // optional string realUser = 2; + boolean hasRealUser(); + String getRealUser(); + } + public static final class UserInformation extends + com.google.protobuf.GeneratedMessage + implements UserInformationOrBuilder { + // Use UserInformation.newBuilder() to construct. 
+ private UserInformation(Builder builder) { + super(builder); + } + private UserInformation(boolean noInit) {} + + private static final UserInformation defaultInstance; + public static UserInformation getDefaultInstance() { + return defaultInstance; + } + + public UserInformation getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; + } + + private int bitField0_; + // required string effectiveUser = 1; + public static final int EFFECTIVEUSER_FIELD_NUMBER = 1; + private java.lang.Object effectiveUser_; + public boolean hasEffectiveUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getEffectiveUser() { + java.lang.Object ref = effectiveUser_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + effectiveUser_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getEffectiveUserBytes() { + java.lang.Object ref = effectiveUser_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + effectiveUser_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string realUser = 2; + public static final int REALUSER_FIELD_NUMBER = 2; + private java.lang.Object realUser_; + public boolean hasRealUser() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getRealUser() { + java.lang.Object ref = realUser_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + realUser_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getRealUserBytes() { + java.lang.Object ref = realUser_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + realUser_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + effectiveUser_ = ""; + realUser_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEffectiveUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getEffectiveUserBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getRealUserBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 
0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getEffectiveUserBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getRealUserBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) obj; + + boolean result = true; + result = result && (hasEffectiveUser() == other.hasEffectiveUser()); + if (hasEffectiveUser()) { + result = result && getEffectiveUser() + .equals(other.getEffectiveUser()); + } + result = result && (hasRealUser() == other.hasRealUser()); + if (hasRealUser()) { + result = result && getRealUser() + .equals(other.getRealUser()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEffectiveUser()) { + hash = (37 * hash) + EFFECTIVEUSER_FIELD_NUMBER; + hash = (53 * hash) + getEffectiveUser().hashCode(); + } + if (hasRealUser()) { + hash = (37 * hash) + REALUSER_FIELD_NUMBER; + hash = (53 * hash) + getRealUser().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + effectiveUser_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + realUser_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.effectiveUser_ = effectiveUser_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.realUser_ = realUser_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this; + if (other.hasEffectiveUser()) { + setEffectiveUser(other.getEffectiveUser()); + } + if (other.hasRealUser()) { + setRealUser(other.getRealUser()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEffectiveUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + effectiveUser_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + realUser_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string effectiveUser = 1; + private java.lang.Object effectiveUser_ = ""; + public boolean hasEffectiveUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getEffectiveUser() { + java.lang.Object ref = effectiveUser_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + effectiveUser_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setEffectiveUser(String 
value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + effectiveUser_ = value; + onChanged(); + return this; + } + public Builder clearEffectiveUser() { + bitField0_ = (bitField0_ & ~0x00000001); + effectiveUser_ = getDefaultInstance().getEffectiveUser(); + onChanged(); + return this; + } + void setEffectiveUser(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + effectiveUser_ = value; + onChanged(); + } + + // optional string realUser = 2; + private java.lang.Object realUser_ = ""; + public boolean hasRealUser() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getRealUser() { + java.lang.Object ref = realUser_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + realUser_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setRealUser(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + realUser_ = value; + onChanged(); + return this; + } + public Builder clearRealUser() { + bitField0_ = (bitField0_ & ~0x00000002); + realUser_ = getDefaultInstance().getRealUser(); + onChanged(); + return this; + } + void setRealUser(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + realUser_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:UserInformation) + } + + static { + defaultInstance = new UserInformation(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserInformation) + } + + public interface ConnectionHeaderOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .UserInformation userInfo = 1; + boolean hasUserInfo(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); + + // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + boolean hasProtocol(); + String getProtocol(); + } + public static final class ConnectionHeader extends + com.google.protobuf.GeneratedMessage + implements ConnectionHeaderOrBuilder { + // Use ConnectionHeader.newBuilder() to construct. 
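+ // ------------------------------------------------------------------
+ // Editor's note: an illustrative sketch, not protoc output. Assuming
+ // an import of org.apache.hadoop.hbase.protobuf.generated.RPCProtos,
+ // a caller would assemble the UserInformation message defined above
+ // via its generated builder. effectiveUser is required, so build()
+ // throws com.google.protobuf.UninitializedMessageException if it was
+ // never set; realUser is optional and, judging by the field names,
+ // only relevant when the effective user is being proxied. The user
+ // names below are hypothetical.
+ //
+ //   RPCProtos.UserInformation userInfo =
+ //       RPCProtos.UserInformation.newBuilder()
+ //           .setEffectiveUser("alice")    // required string field 1
+ //           .setRealUser("hbase-proxy")   // optional string field 2
+ //           .build();
+ // ------------------------------------------------------------------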
+ private ConnectionHeader(Builder builder) { + super(builder); + } + private ConnectionHeader(boolean noInit) {} + + private static final ConnectionHeader defaultInstance; + public static ConnectionHeader getDefaultInstance() { + return defaultInstance; + } + + public ConnectionHeader getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; + } + + private int bitField0_; + // optional .UserInformation userInfo = 1; + public static final int USERINFO_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_; + public boolean hasUserInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { + return userInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { + return userInfo_; + } + + // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; + public static final int PROTOCOL_FIELD_NUMBER = 2; + private java.lang.Object protocol_; + public boolean hasProtocol() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocol() { + java.lang.Object ref = protocol_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + protocol_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getProtocolBytes() { + java.lang.Object ref = protocol_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + protocol_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasUserInfo()) { + if (!getUserInfo().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, userInfo_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getProtocolBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, userInfo_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getProtocolBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) obj; + + boolean result = true; + result = result && (hasUserInfo() == other.hasUserInfo()); + if (hasUserInfo()) { + result = result && getUserInfo() + .equals(other.getUserInfo()); + } + result = result && (hasProtocol() == other.hasProtocol()); + if (hasProtocol()) { + result = result && getProtocol() + .equals(other.getProtocol()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUserInfo()) { + hash = (37 * hash) + USERINFO_FIELD_NUMBER; + hash = (53 * hash) + getUserInfo().hashCode(); + } + if (hasProtocol()) { + hash = (37 * hash) + PROTOCOL_FIELD_NUMBER; + hash = (53 * hash) + getProtocol().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) 
{ + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getUserInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (userInfoBuilder_ == null) { + userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + } else { + userInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (userInfoBuilder_ == null) { + result.userInfo_ = userInfo_; + } else { + result.userInfo_ = userInfoBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.protocol_ = protocol_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this; + if (other.hasUserInfo()) { + mergeUserInfo(other.getUserInfo()); + } + if (other.hasProtocol()) { + setProtocol(other.getProtocol()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasUserInfo()) { + if (!getUserInfo().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(); + if (hasUserInfo()) { + subBuilder.mergeFrom(getUserInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setUserInfo(subBuilder.buildPartial()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + protocol_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // optional .UserInformation userInfo = 1; + private 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; + public boolean hasUserInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { + if (userInfoBuilder_ == null) { + return userInfo_; + } else { + return userInfoBuilder_.getMessage(); + } + } + public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { + if (userInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + userInfo_ = value; + onChanged(); + } else { + userInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setUserInfo( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) { + if (userInfoBuilder_ == null) { + userInfo_ = builderForValue.build(); + onChanged(); + } else { + userInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { + if (userInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { + userInfo_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); + } else { + userInfo_ = value; + } + onChanged(); + } else { + userInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearUserInfo() { + if (userInfoBuilder_ == null) { + userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + onChanged(); + } else { + userInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getUserInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { + if (userInfoBuilder_ != null) { + return userInfoBuilder_.getMessageOrBuilder(); + } else { + return userInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> + getUserInfoFieldBuilder() { + if (userInfoBuilder_ == null) { + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>( + userInfo_, + getParentForChildren(), + isClean()); + userInfo_ = null; + } + return userInfoBuilder_; + } + + // optional string protocol = 2 [default 
= "org.apache.hadoop.hbase.client.ClientProtocol"]; + private java.lang.Object protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; + public boolean hasProtocol() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocol() { + java.lang.Object ref = protocol_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + protocol_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setProtocol(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + protocol_ = value; + onChanged(); + return this; + } + public Builder clearProtocol() { + bitField0_ = (bitField0_ & ~0x00000002); + protocol_ = getDefaultInstance().getProtocol(); + onChanged(); + return this; + } + void setProtocol(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + protocol_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:ConnectionHeader) + } + + static { + defaultInstance = new ConnectionHeader(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ConnectionHeader) + } + + public interface RpcRequestHeaderOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint32 callId = 1; + boolean hasCallId(); + int getCallId(); + + // optional .RPCTInfo tinfo = 2; + boolean hasTinfo(); + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo(); + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder(); + } + public static final class RpcRequestHeader extends + com.google.protobuf.GeneratedMessage + implements RpcRequestHeaderOrBuilder { + // Use RpcRequestHeader.newBuilder() to construct. + private RpcRequestHeader(Builder builder) { + super(builder); + } + private RpcRequestHeader(boolean noInit) {} + + private static final RpcRequestHeader defaultInstance; + public static RpcRequestHeader getDefaultInstance() { + return defaultInstance; + } + + public RpcRequestHeader getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; + } + + private int bitField0_; + // required uint32 callId = 1; + public static final int CALLID_FIELD_NUMBER = 1; + private int callId_; + public boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getCallId() { + return callId_; + } + + // optional .RPCTInfo tinfo = 2; + public static final int TINFO_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_; + public boolean hasTinfo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { + return tinfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { + return tinfo_; + } + + private void initFields() { + callId_ = 0; + tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCallId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, tinfo_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, tinfo_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) obj; + + boolean result = true; + result = result && (hasCallId() == other.hasCallId()); + if (hasCallId()) { + result = result && (getCallId() + == other.getCallId()); + } + result = result && (hasTinfo() == other.hasTinfo()); + if (hasTinfo()) { + result = result && getTinfo() + .equals(other.getTinfo()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCallId()) { + hash = (37 * hash) + CALLID_FIELD_NUMBER; + hash = (53 * hash) + getCallId(); + } + if (hasTinfo()) { + hash = (37 * hash) + TINFO_FIELD_NUMBER; + hash = (53 * hash) + getTinfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeaderOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTinfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() 
{ + super.clear(); + callId_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + if (tinfoBuilder_ == null) { + tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + } else { + tinfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.callId_ = callId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (tinfoBuilder_ == null) { + result.tinfo_ = tinfo_; + } else { + result.tinfo_ = tinfoBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance()) return this; + if (other.hasCallId()) { + setCallId(other.getCallId()); + } + if (other.hasTinfo()) { + mergeTinfo(other.getTinfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCallId()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, 
unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + callId_ = input.readUInt32(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(); + if (hasTinfo()) { + subBuilder.mergeFrom(getTinfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setTinfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required uint32 callId = 1; + private int callId_ ; + public boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getCallId() { + return callId_; + } + public Builder setCallId(int value) { + bitField0_ |= 0x00000001; + callId_ = value; + onChanged(); + return this; + } + public Builder clearCallId() { + bitField0_ = (bitField0_ & ~0x00000001); + callId_ = 0; + onChanged(); + return this; + } + + // optional .RPCTInfo tinfo = 2; + private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> tinfoBuilder_; + public boolean hasTinfo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { + if (tinfoBuilder_ == null) { + return tinfo_; + } else { + return tinfoBuilder_.getMessage(); + } + } + public Builder setTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { + if (tinfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tinfo_ = value; + onChanged(); + } else { + tinfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setTinfo( + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) { + if (tinfoBuilder_ == null) { + tinfo_ = builderForValue.build(); + onChanged(); + } else { + tinfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { + if (tinfoBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + tinfo_ != org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) { + tinfo_ = + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(tinfo_).mergeFrom(value).buildPartial(); + } else { + tinfo_ = value; + } + onChanged(); + } else { + tinfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearTinfo() { + if (tinfoBuilder_ == null) { + tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + onChanged(); + } else { + tinfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTinfoBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getTinfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { + if (tinfoBuilder_ != null) { + return 
tinfoBuilder_.getMessageOrBuilder(); + } else { + return tinfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> + getTinfoFieldBuilder() { + if (tinfoBuilder_ == null) { + tinfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder>( + tinfo_, + getParentForChildren(), + isClean()); + tinfo_ = null; + } + return tinfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RpcRequestHeader) + } + + static { + defaultInstance = new RpcRequestHeader(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RpcRequestHeader) + } + + public interface RpcRequestBodyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string methodName = 1; + boolean hasMethodName(); + String getMethodName(); + + // optional uint64 clientProtocolVersion = 2; + boolean hasClientProtocolVersion(); + long getClientProtocolVersion(); + + // optional bytes request = 3; + boolean hasRequest(); + com.google.protobuf.ByteString getRequest(); + + // optional string requestClassName = 4; + boolean hasRequestClassName(); + String getRequestClassName(); + } + public static final class RpcRequestBody extends + com.google.protobuf.GeneratedMessage + implements RpcRequestBodyOrBuilder { + // Use RpcRequestBody.newBuilder() to construct. + private RpcRequestBody(Builder builder) { + super(builder); + } + private RpcRequestBody(boolean noInit) {} + + private static final RpcRequestBody defaultInstance; + public static RpcRequestBody getDefaultInstance() { + return defaultInstance; + } + + public RpcRequestBody getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; + } + + private int bitField0_; + // required string methodName = 1; + public static final int METHODNAME_FIELD_NUMBER = 1; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + methodName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional uint64 clientProtocolVersion = 2; + public static final int CLIENTPROTOCOLVERSION_FIELD_NUMBER = 2; + private long clientProtocolVersion_; + public boolean 
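+ // ------------------------------------------------------------------
+ // Editor's note: an illustrative sketch, not protoc output. The
+ // RpcRequestHeader completed above fronts each call. callId is the
+ // only required field and is what lets a client match the
+ // RpcResponseHeader defined later in this file back to an
+ // outstanding call; tinfo is an optional Tracing.RPCTInfo carrying
+ // trace context. The id below is hypothetical.
+ //
+ //   RPCProtos.RpcRequestHeader requestHeader =
+ //       RPCProtos.RpcRequestHeader.newBuilder()
+ //           .setCallId(42)   // required uint32 field 1
+ //           .build();        // tinfo deliberately left unset
+ // ------------------------------------------------------------------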
hasClientProtocolVersion() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getClientProtocolVersion() { + return clientProtocolVersion_; + } + + // optional bytes request = 3; + public static final int REQUEST_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString request_; + public boolean hasRequest() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getRequest() { + return request_; + } + + // optional string requestClassName = 4; + public static final int REQUESTCLASSNAME_FIELD_NUMBER = 4; + private java.lang.Object requestClassName_; + public boolean hasRequestClassName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getRequestClassName() { + java.lang.Object ref = requestClassName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + requestClassName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getRequestClassNameBytes() { + java.lang.Object ref = requestClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + requestClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + methodName_ = ""; + clientProtocolVersion_ = 0L; + request_ = com.google.protobuf.ByteString.EMPTY; + requestClassName_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, clientProtocolVersion_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, request_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getRequestClassNameBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, clientProtocolVersion_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, request_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getRequestClassNameBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public 
boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) obj; + + boolean result = true; + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && (hasClientProtocolVersion() == other.hasClientProtocolVersion()); + if (hasClientProtocolVersion()) { + result = result && (getClientProtocolVersion() + == other.getClientProtocolVersion()); + } + result = result && (hasRequest() == other.hasRequest()); + if (hasRequest()) { + result = result && getRequest() + .equals(other.getRequest()); + } + result = result && (hasRequestClassName() == other.hasRequestClassName()); + if (hasRequestClassName()) { + result = result && getRequestClassName() + .equals(other.getRequestClassName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (hasClientProtocolVersion()) { + hash = (37 * hash) + CLIENTPROTOCOLVERSION_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getClientProtocolVersion()); + } + if (hasRequest()) { + hash = (37 * hash) + REQUEST_FIELD_NUMBER; + hash = (53 * hash) + getRequest().hashCode(); + } + if (hasRequestClassName()) { + hash = (37 * hash) + REQUESTCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getRequestClassName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBodyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + clientProtocolVersion_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + request_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + requestClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.methodName_ = methodName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.clientProtocolVersion_ = clientProtocolVersion_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.request_ = request_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.requestClassName_ = requestClassName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance()) return this; + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (other.hasClientProtocolVersion()) { + setClientProtocolVersion(other.getClientProtocolVersion()); + } + if (other.hasRequest()) { + setRequest(other.getRequest()); + } + if (other.hasRequestClassName()) { + setRequestClassName(other.getRequestClassName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasMethodName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 
0x00000001; + methodName_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + clientProtocolVersion_ = input.readUInt64(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + request_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + requestClassName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string methodName = 1; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000001); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + methodName_ = value; + onChanged(); + } + + // optional uint64 clientProtocolVersion = 2; + private long clientProtocolVersion_ ; + public boolean hasClientProtocolVersion() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getClientProtocolVersion() { + return clientProtocolVersion_; + } + public Builder setClientProtocolVersion(long value) { + bitField0_ |= 0x00000002; + clientProtocolVersion_ = value; + onChanged(); + return this; + } + public Builder clearClientProtocolVersion() { + bitField0_ = (bitField0_ & ~0x00000002); + clientProtocolVersion_ = 0L; + onChanged(); + return this; + } + + // optional bytes request = 3; + private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRequest() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getRequest() { + return request_; + } + public Builder setRequest(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + request_ = value; + onChanged(); + return this; + } + public Builder clearRequest() { + bitField0_ = (bitField0_ & ~0x00000004); + request_ = getDefaultInstance().getRequest(); + onChanged(); + return this; + } + + // optional string requestClassName = 4; + private java.lang.Object requestClassName_ = ""; + public boolean hasRequestClassName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getRequestClassName() { + java.lang.Object ref = requestClassName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + requestClassName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setRequestClassName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + requestClassName_ = value; + onChanged(); + return this; + } + public Builder clearRequestClassName() { + bitField0_ = (bitField0_ & ~0x00000008); + requestClassName_ = getDefaultInstance().getRequestClassName(); + onChanged(); + return this; + } + void setRequestClassName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000008; + requestClassName_ = value; + onChanged(); + } + + // 
@@protoc_insertion_point(builder_scope:RpcRequestBody) + } + + static { + defaultInstance = new RpcRequestBody(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RpcRequestBody) + } + + public interface RpcResponseHeaderOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint32 callId = 1; + boolean hasCallId(); + int getCallId(); + + // required .RpcResponseHeader.Status status = 2; + boolean hasStatus(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus(); + } + public static final class RpcResponseHeader extends + com.google.protobuf.GeneratedMessage + implements RpcResponseHeaderOrBuilder { + // Use RpcResponseHeader.newBuilder() to construct. + private RpcResponseHeader(Builder builder) { + super(builder); + } + private RpcResponseHeader(boolean noInit) {} + + private static final RpcResponseHeader defaultInstance; + public static RpcResponseHeader getDefaultInstance() { + return defaultInstance; + } + + public RpcResponseHeader getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; + } + + public enum Status + implements com.google.protobuf.ProtocolMessageEnum { + SUCCESS(0, 0), + ERROR(1, 1), + FATAL(2, 2), + ; + + public static final int SUCCESS_VALUE = 0; + public static final int ERROR_VALUE = 1; + public static final int FATAL_VALUE = 2; + + + public final int getNumber() { return value; } + + public static Status valueOf(int value) { + switch (value) { + case 0: return SUCCESS; + case 1: return ERROR; + case 2: return FATAL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Status findValueByNumber(int number) { + return Status.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor().getEnumTypes().get(0); + } + + private static final Status[] VALUES = { + SUCCESS, ERROR, FATAL, + }; + + public static Status valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private Status(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:RpcResponseHeader.Status) + } + + private int bitField0_; + // required uint32 callId = 1; + public static final int CALLID_FIELD_NUMBER = 1; + private int callId_; + public 
boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getCallId() { + return callId_; + } + + // required .RpcResponseHeader.Status status = 2; + public static final int STATUS_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_; + public boolean hasStatus() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { + return status_; + } + + private void initFields() { + callId_ = 0; + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCallId()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasStatus()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, status_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, status_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) obj; + + boolean result = true; + result = result && (hasCallId() == other.hasCallId()); + if (hasCallId()) { + result = result && (getCallId() + == other.getCallId()); + } + result = result && (hasStatus() == other.hasStatus()); + if (hasStatus()) { + result = result && + (getStatus() == other.getStatus()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCallId()) { + hash = (37 * hash) + CALLID_FIELD_NUMBER; + hash = (53 * hash) + getCallId(); + } + if (hasStatus()) { + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getStatus()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeaderOrBuilder { + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + callId_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.callId_ = callId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.status_ = status_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance()) return this; + if (other.hasCallId()) { + setCallId(other.getCallId()); + 
} + if (other.hasStatus()) { + setStatus(other.getStatus()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCallId()) { + + return false; + } + if (!hasStatus()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + callId_ = input.readUInt32(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + status_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required uint32 callId = 1; + private int callId_ ; + public boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getCallId() { + return callId_; + } + public Builder setCallId(int value) { + bitField0_ |= 0x00000001; + callId_ = value; + onChanged(); + return this; + } + public Builder clearCallId() { + bitField0_ = (bitField0_ & ~0x00000001); + callId_ = 0; + onChanged(); + return this; + } + + // required .RpcResponseHeader.Status status = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + public boolean hasStatus() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { + return status_; + } + public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + status_ = value; + onChanged(); + return this; + } + public Builder clearStatus() { + bitField0_ = (bitField0_ & ~0x00000002); + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RpcResponseHeader) + } + + static { + defaultInstance = new RpcResponseHeader(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RpcResponseHeader) + } + + public interface RpcResponseBodyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes response = 1; + boolean hasResponse(); + com.google.protobuf.ByteString getResponse(); + } + public static final class RpcResponseBody extends + com.google.protobuf.GeneratedMessage + implements RpcResponseBodyOrBuilder { + // Use RpcResponseBody.newBuilder() to construct. 
+ private RpcResponseBody(Builder builder) { + super(builder); + } + private RpcResponseBody(boolean noInit) {} + + private static final RpcResponseBody defaultInstance; + public static RpcResponseBody getDefaultInstance() { + return defaultInstance; + } + + public RpcResponseBody getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes response = 1; + public static final int RESPONSE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString response_; + public boolean hasResponse() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getResponse() { + return response_; + } + + private void initFields() { + response_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, response_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, response_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) obj; + + boolean result = true; + result = result && (hasResponse() == other.hasResponse()); + if (hasResponse()) { + result = result && getResponse() + .equals(other.getResponse()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResponse()) { + hash = (37 * hash) + RESPONSE_FIELD_NUMBER; + hash = (53 * hash) + getResponse().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBodyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + response_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.response_ = response_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance()) return this; + if (other.hasResponse()) { + setResponse(other.getResponse()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + 
this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + response_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes response = 1; + private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasResponse() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getResponse() { + return response_; + } + public Builder setResponse(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + response_ = value; + onChanged(); + return this; + } + public Builder clearResponse() { + bitField0_ = (bitField0_ & ~0x00000001); + response_ = getDefaultInstance().getResponse(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RpcResponseBody) + } + + static { + defaultInstance = new RpcResponseBody(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RpcResponseBody) + } + + public interface RpcExceptionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string exceptionName = 1; + boolean hasExceptionName(); + String getExceptionName(); + + // optional string stackTrace = 2; + boolean hasStackTrace(); + String getStackTrace(); + } + public static final class RpcException extends + com.google.protobuf.GeneratedMessage + implements RpcExceptionOrBuilder { + // Use RpcException.newBuilder() to construct. 
+ private RpcException(Builder builder) { + super(builder); + } + private RpcException(boolean noInit) {} + + private static final RpcException defaultInstance; + public static RpcException getDefaultInstance() { + return defaultInstance; + } + + public RpcException getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; + } + + private int bitField0_; + // required string exceptionName = 1; + public static final int EXCEPTIONNAME_FIELD_NUMBER = 1; + private java.lang.Object exceptionName_; + public boolean hasExceptionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getExceptionName() { + java.lang.Object ref = exceptionName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + exceptionName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getExceptionNameBytes() { + java.lang.Object ref = exceptionName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + exceptionName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string stackTrace = 2; + public static final int STACKTRACE_FIELD_NUMBER = 2; + private java.lang.Object stackTrace_; + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + stackTrace_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + exceptionName_ = ""; + stackTrace_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasExceptionName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getExceptionNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getStackTraceBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 
0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getExceptionNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getStackTraceBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) obj; + + boolean result = true; + result = result && (hasExceptionName() == other.hasExceptionName()); + if (hasExceptionName()) { + result = result && getExceptionName() + .equals(other.getExceptionName()); + } + result = result && (hasStackTrace() == other.hasStackTrace()); + if (hasStackTrace()) { + result = result && getStackTrace() + .equals(other.getStackTrace()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasExceptionName()) { + hash = (37 * hash) + EXCEPTIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getExceptionName().hashCode(); + } + if (hasStackTrace()) { + hash = (37 * hash) + STACKTRACE_FIELD_NUMBER; + hash = (53 * hash) + getStackTrace().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcExceptionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + exceptionName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + stackTrace_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException build() { + 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.exceptionName_ = exceptionName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.stackTrace_ = stackTrace_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance()) return this; + if (other.hasExceptionName()) { + setExceptionName(other.getExceptionName()); + } + if (other.hasStackTrace()) { + setStackTrace(other.getStackTrace()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasExceptionName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + exceptionName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + stackTrace_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string exceptionName = 1; + private java.lang.Object exceptionName_ = ""; + public boolean hasExceptionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getExceptionName() { + java.lang.Object ref = exceptionName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + exceptionName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setExceptionName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; 
+ exceptionName_ = value; + onChanged(); + return this; + } + public Builder clearExceptionName() { + bitField0_ = (bitField0_ & ~0x00000001); + exceptionName_ = getDefaultInstance().getExceptionName(); + onChanged(); + return this; + } + void setExceptionName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + exceptionName_ = value; + onChanged(); + } + + // optional string stackTrace = 2; + private java.lang.Object stackTrace_ = ""; + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + stackTrace_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setStackTrace(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + return this; + } + public Builder clearStackTrace() { + bitField0_ = (bitField0_ & ~0x00000002); + stackTrace_ = getDefaultInstance().getStackTrace(); + onChanged(); + return this; + } + void setStackTrace(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:RpcException) + } + + static { + defaultInstance = new RpcException(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RpcException) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserInformation_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserInformation_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ConnectionHeader_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ConnectionHeader_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RpcRequestHeader_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RpcRequestHeader_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RpcRequestBody_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RpcRequestBody_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RpcResponseHeader_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RpcResponseHeader_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RpcResponseBody_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RpcResponseBody_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RpcException_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RpcException_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\tRPC.proto\032\rTracing.proto\":\n\017UserInform" + + "ation\022\025\n\reffectiveUser\030\001 \002(\t\022\020\n\010realUser" + + "\030\002 
\001(\t\"w\n\020ConnectionHeader\022\"\n\010userInfo\030\001" + + " \001(\0132\020.UserInformation\022?\n\010protocol\030\002 \001(\t" + + ":-org.apache.hadoop.hbase.client.ClientP" + + "rotocol\"<\n\020RpcRequestHeader\022\016\n\006callId\030\001 " + + "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"n\n\016RpcRequ" + + "estBody\022\022\n\nmethodName\030\001 \002(\t\022\035\n\025clientPro" + + "tocolVersion\030\002 \001(\004\022\017\n\007request\030\003 \001(\014\022\030\n\020r" + + "equestClassName\030\004 \001(\t\"{\n\021RpcResponseHead", + "er\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 \002(\0162\031.Rpc" + + "ResponseHeader.Status\"+\n\006Status\022\013\n\007SUCCE" + + "SS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017RpcRespons" + + "eBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcException\022" + + "\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstackTrace\030\002 \001" + + "(\tB<\n*org.apache.hadoop.hbase.protobuf.g" + + "eneratedB\tRPCProtosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_UserInformation_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_UserInformation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserInformation_descriptor, + new java.lang.String[] { "EffectiveUser", "RealUser", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); + internal_static_ConnectionHeader_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_ConnectionHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ConnectionHeader_descriptor, + new java.lang.String[] { "UserInfo", "Protocol", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); + internal_static_RpcRequestHeader_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_RpcRequestHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RpcRequestHeader_descriptor, + new java.lang.String[] { "CallId", "Tinfo", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.Builder.class); + internal_static_RpcRequestBody_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_RpcRequestBody_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RpcRequestBody_descriptor, + new java.lang.String[] { "MethodName", "ClientProtocolVersion", "Request", "RequestClassName", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.Builder.class); + internal_static_RpcResponseHeader_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_RpcResponseHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_RpcResponseHeader_descriptor, + new java.lang.String[] { "CallId", "Status", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Builder.class); + internal_static_RpcResponseBody_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_RpcResponseBody_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RpcResponseBody_descriptor, + new java.lang.String[] { "Response", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.Builder.class); + internal_static_RpcException_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_RpcException_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RpcException_descriptor, + new java.lang.String[] { "ExceptionName", "StackTrace", }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.Tracing.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java new file mode 100644 index 0000000..89eebc4 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java @@ -0,0 +1,4284 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: RegionServerStatus.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class RegionServerStatusProtos { + private RegionServerStatusProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RegionServerStartupRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint32 port = 1; + boolean hasPort(); + int getPort(); + + // required uint64 serverStartCode = 2; + boolean hasServerStartCode(); + long getServerStartCode(); + + // required uint64 serverCurrentTime = 3; + boolean hasServerCurrentTime(); + long getServerCurrentTime(); + } + public static final class RegionServerStartupRequest extends + com.google.protobuf.GeneratedMessage + implements RegionServerStartupRequestOrBuilder { + // Use RegionServerStartupRequest.newBuilder() to construct. 
+ private RegionServerStartupRequest(Builder builder) { + super(builder); + } + private RegionServerStartupRequest(boolean noInit) {} + + private static final RegionServerStartupRequest defaultInstance; + public static RegionServerStartupRequest getDefaultInstance() { + return defaultInstance; + } + + public RegionServerStartupRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable; + } + + private int bitField0_; + // required uint32 port = 1; + public static final int PORT_FIELD_NUMBER = 1; + private int port_; + public boolean hasPort() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getPort() { + return port_; + } + + // required uint64 serverStartCode = 2; + public static final int SERVERSTARTCODE_FIELD_NUMBER = 2; + private long serverStartCode_; + public boolean hasServerStartCode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getServerStartCode() { + return serverStartCode_; + } + + // required uint64 serverCurrentTime = 3; + public static final int SERVERCURRENTTIME_FIELD_NUMBER = 3; + private long serverCurrentTime_; + public boolean hasServerCurrentTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getServerCurrentTime() { + return serverCurrentTime_; + } + + private void initFields() { + port_ = 0; + serverStartCode_ = 0L; + serverCurrentTime_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPort()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerStartCode()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerCurrentTime()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, port_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, serverStartCode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, serverCurrentTime_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, port_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, serverStartCode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, serverCurrentTime_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected 
java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) obj; + + boolean result = true; + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasServerStartCode() == other.hasServerStartCode()); + if (hasServerStartCode()) { + result = result && (getServerStartCode() + == other.getServerStartCode()); + } + result = result && (hasServerCurrentTime() == other.hasServerCurrentTime()); + if (hasServerCurrentTime()) { + result = result && (getServerCurrentTime() + == other.getServerCurrentTime()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasServerStartCode()) { + hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getServerStartCode()); + } + if (hasServerCurrentTime()) { + hash = (37 * hash) + SERVERCURRENTTIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getServerCurrentTime()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + serverStartCode_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + serverCurrentTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.serverStartCode_ = serverStartCode_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.serverCurrentTime_ = serverCurrentTime_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance()) return this; + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasServerStartCode()) { + setServerStartCode(other.getServerStartCode()); + } + if (other.hasServerCurrentTime()) { + setServerCurrentTime(other.getServerCurrentTime()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPort()) { + + return false; + } + if (!hasServerStartCode()) { + + return false; + } + if (!hasServerCurrentTime()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + 
this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + port_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + serverStartCode_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + serverCurrentTime_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required uint32 port = 1; + private int port_ ; + public boolean hasPort() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getPort() { + return port_; + } + public Builder setPort(int value) { + bitField0_ |= 0x00000001; + port_ = value; + onChanged(); + return this; + } + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000001); + port_ = 0; + onChanged(); + return this; + } + + // required uint64 serverStartCode = 2; + private long serverStartCode_ ; + public boolean hasServerStartCode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getServerStartCode() { + return serverStartCode_; + } + public Builder setServerStartCode(long value) { + bitField0_ |= 0x00000002; + serverStartCode_ = value; + onChanged(); + return this; + } + public Builder clearServerStartCode() { + bitField0_ = (bitField0_ & ~0x00000002); + serverStartCode_ = 0L; + onChanged(); + return this; + } + + // required uint64 serverCurrentTime = 3; + private long serverCurrentTime_ ; + public boolean hasServerCurrentTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getServerCurrentTime() { + return serverCurrentTime_; + } + public Builder setServerCurrentTime(long value) { + bitField0_ |= 0x00000004; + serverCurrentTime_ = value; + onChanged(); + return this; + } + public Builder clearServerCurrentTime() { + bitField0_ = (bitField0_ & ~0x00000004); + serverCurrentTime_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionServerStartupRequest) + } + + static { + defaultInstance = new RegionServerStartupRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionServerStartupRequest) + } + + public interface RegionServerStartupResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .NameStringPair mapEntries = 1; + java.util.List + getMapEntriesList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index); + int getMapEntriesCount(); + java.util.List + getMapEntriesOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( + int index); + } + public static final class RegionServerStartupResponse extends + com.google.protobuf.GeneratedMessage + implements RegionServerStartupResponseOrBuilder { + // Use RegionServerStartupResponse.newBuilder() to construct. 
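+ // Not generated output: a hedged usage sketch for the startup handshake,
+ // using only the generated API in this file. `replyBytes` and the literal
+ // port are assumptions for illustration, not part of the generated code.
+ //
+ //   RegionServerStartupRequest req = RegionServerStartupRequest.newBuilder()
+ //       .setPort(60020)                                  // hypothetical port
+ //       .setServerStartCode(System.currentTimeMillis())  // hypothetical start code
+ //       .setServerCurrentTime(System.currentTimeMillis())
+ //       .build();                                        // throws if a required field is unset
+ //   byte[] wire = req.toByteArray();
+ //   // ... send `wire` over the RPC transport, receive `replyBytes` ...
+ //   RegionServerStartupResponse resp = RegionServerStartupResponse.parseFrom(replyBytes);
+ //   for (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair p
+ //       : resp.getMapEntriesList()) { /* apply each name/value entry */ }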
+ private RegionServerStartupResponse(Builder builder) { + super(builder); + } + private RegionServerStartupResponse(boolean noInit) {} + + private static final RegionServerStartupResponse defaultInstance; + public static RegionServerStartupResponse getDefaultInstance() { + return defaultInstance; + } + + public RegionServerStartupResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable; + } + + // repeated .NameStringPair mapEntries = 1; + public static final int MAPENTRIES_FIELD_NUMBER = 1; + private java.util.List mapEntries_; + public java.util.List getMapEntriesList() { + return mapEntries_; + } + public java.util.List + getMapEntriesOrBuilderList() { + return mapEntries_; + } + public int getMapEntriesCount() { + return mapEntries_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { + return mapEntries_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( + int index) { + return mapEntries_.get(index); + } + + private void initFields() { + mapEntries_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getMapEntriesCount(); i++) { + if (!getMapEntries(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < mapEntries_.size(); i++) { + output.writeMessage(1, mapEntries_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < mapEntries_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, mapEntries_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) obj; + + boolean result = true; + result = result && getMapEntriesList() + .equals(other.getMapEntriesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + 
return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getMapEntriesCount() > 0) { + hash = (37 * hash) + MAPENTRIES_FIELD_NUMBER; + hash = (53 * hash) + getMapEntriesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder 
newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMapEntriesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (mapEntriesBuilder_ == null) { + mapEntries_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + mapEntriesBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(this); + int from_bitField0_ = bitField0_; + if (mapEntriesBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.mapEntries_ = mapEntries_; + } else { + result.mapEntries_ = mapEntriesBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()) return this; + if (mapEntriesBuilder_ == null) { + if (!other.mapEntries_.isEmpty()) { + if (mapEntries_.isEmpty()) { + mapEntries_ = other.mapEntries_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureMapEntriesIsMutable(); + mapEntries_.addAll(other.mapEntries_); + } + onChanged(); + } + } else { + if (!other.mapEntries_.isEmpty()) { + if (mapEntriesBuilder_.isEmpty()) { + mapEntriesBuilder_.dispose(); + mapEntriesBuilder_ = null; + mapEntries_ = other.mapEntries_; + bitField0_ = (bitField0_ & ~0x00000001); + mapEntriesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getMapEntriesFieldBuilder() : null; + } else { + mapEntriesBuilder_.addAllMessages(other.mapEntries_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getMapEntriesCount(); i++) { + if (!getMapEntries(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addMapEntries(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .NameStringPair mapEntries = 1; + private java.util.List mapEntries_ = + java.util.Collections.emptyList(); + private void ensureMapEntriesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = new java.util.ArrayList(mapEntries_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_; + + public java.util.List getMapEntriesList() { + if (mapEntriesBuilder_ == null) { + return java.util.Collections.unmodifiableList(mapEntries_); + } else { + return mapEntriesBuilder_.getMessageList(); + } + } + public int getMapEntriesCount() { + if (mapEntriesBuilder_ == null) { + return mapEntries_.size(); + } else { + return mapEntriesBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { + if (mapEntriesBuilder_ == null) { + return mapEntries_.get(index); + } else { + return mapEntriesBuilder_.getMessage(index); + } + } + public Builder setMapEntries( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (mapEntriesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntriesIsMutable(); + mapEntries_.set(index, value); + onChanged(); + } else { + mapEntriesBuilder_.setMessage(index, value); + } + return this; + } + public Builder setMapEntries( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.set(index, builderForValue.build()); + onChanged(); + } else { + mapEntriesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addMapEntries(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (mapEntriesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + 
ensureMapEntriesIsMutable(); + mapEntries_.add(value); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(value); + } + return this; + } + public Builder addMapEntries( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (mapEntriesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntriesIsMutable(); + mapEntries_.add(index, value); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(index, value); + } + return this; + } + public Builder addMapEntries( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.add(builderForValue.build()); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addMapEntries( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.add(index, builderForValue.build()); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllMapEntries( + java.lang.Iterable values) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + super.addAll(values, mapEntries_); + onChanged(); + } else { + mapEntriesBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearMapEntries() { + if (mapEntriesBuilder_ == null) { + mapEntries_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + mapEntriesBuilder_.clear(); + } + return this; + } + public Builder removeMapEntries(int index) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.remove(index); + onChanged(); + } else { + mapEntriesBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder( + int index) { + return getMapEntriesFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( + int index) { + if (mapEntriesBuilder_ == null) { + return mapEntries_.get(index); } else { + return mapEntriesBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getMapEntriesOrBuilderList() { + if (mapEntriesBuilder_ != null) { + return mapEntriesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(mapEntries_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() { + return getMapEntriesFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder( + int index) { + return getMapEntriesFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); + } + public java.util.List + getMapEntriesBuilderList() { + return getMapEntriesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> + getMapEntriesFieldBuilder() { + if (mapEntriesBuilder_ == null) { + mapEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( + mapEntries_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + mapEntries_ = null; + } + return mapEntriesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RegionServerStartupResponse) + } + + static { + defaultInstance = new RegionServerStartupResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionServerStartupResponse) + } + + public interface RegionServerReportRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName server = 1; + boolean hasServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); + + // optional .ServerLoad load = 2; + boolean hasLoad(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder(); + } + public static final class RegionServerReportRequest extends + com.google.protobuf.GeneratedMessage + implements RegionServerReportRequestOrBuilder { + // Use RegionServerReportRequest.newBuilder() to construct. + private RegionServerReportRequest(Builder builder) { + super(builder); + } + private RegionServerReportRequest(boolean noInit) {} + + private static final RegionServerReportRequest defaultInstance; + public static RegionServerReportRequest getDefaultInstance() { + return defaultInstance; + } + + public RegionServerReportRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName server = 1; + public static final int SERVER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + return server_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + return server_; + } + + // optional .ServerLoad load = 2; + public static final int LOAD_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_; + public boolean hasLoad() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() { + return load_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() { + return load_; + } + 
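+ // Not generated output: a hedged sketch of building a report request,
+ // assuming a previously built HBaseProtos.ServerName `sn` and
+ // HBaseProtos.ServerLoad `sl` (their builders live in HBaseProtos, not here):
+ //
+ //   RegionServerReportRequest report = RegionServerReportRequest.newBuilder()
+ //       .setServer(sn)  // required field: build() throws if it is unset
+ //       .setLoad(sl)    // optional field: may be omitted entirely
+ //       .build();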
+ private void initFields() { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServer()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServer().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasLoad()) { + if (!getLoad().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, server_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, load_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, server_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, load_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) obj; + + boolean result = true; + result = result && (hasServer() == other.hasServer()); + if (hasServer()) { + result = result && getServer() + .equals(other.getServer()); + } + result = result && (hasLoad() == other.hasLoad()); + if (hasLoad()) { + result = result && getLoad() + .equals(other.getLoad()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServer()) { + hash = (37 * hash) + SERVER_FIELD_NUMBER; + hash = (53 * hash) + getServer().hashCode(); + } + if (hasLoad()) { + hash = (37 * hash) + LOAD_FIELD_NUMBER; + hash = (53 * hash) + getLoad().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + com.google.protobuf.ByteString data, 
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerFieldBuilder(); + getLoadFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (loadBuilder_ == null) { + load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + } else { + loadBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverBuilder_ == null) { + result.server_ = server_; + } else { + result.server_ = serverBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (loadBuilder_ == null) { + result.load_ = load_; + } else { + result.load_ = 
loadBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance()) return this; + if (other.hasServer()) { + mergeServer(other.getServer()); + } + if (other.hasLoad()) { + mergeLoad(other.getLoad()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServer()) { + + return false; + } + if (!getServer().isInitialized()) { + + return false; + } + if (hasLoad()) { + if (!getLoad().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServer()) { + subBuilder.mergeFrom(getServer()); + } + input.readMessage(subBuilder, extensionRegistry); + setServer(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(); + if (hasLoad()) { + subBuilder.mergeFrom(getLoad()); + } + input.readMessage(subBuilder, extensionRegistry); + setLoad(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName server = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + if (serverBuilder_ == null) { + return server_; + } else { + return serverBuilder_.getMessage(); + } + } + public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + server_ = 
value; + onChanged(); + } else { + serverBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServer( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverBuilder_ == null) { + server_ = builderForValue.build(); + onChanged(); + } else { + serverBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + server_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); + } else { + server_ = value; + } + onChanged(); + } else { + serverBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServer() { + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + if (serverBuilder_ != null) { + return serverBuilder_.getMessageOrBuilder(); + } else { + return server_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerFieldBuilder() { + if (serverBuilder_ == null) { + serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + server_, + getParentForChildren(), + isClean()); + server_ = null; + } + return serverBuilder_; + } + + // optional .ServerLoad load = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> loadBuilder_; + public boolean hasLoad() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() { + if (loadBuilder_ == null) { + return load_; + } else { + return loadBuilder_.getMessage(); + } + } + public Builder setLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { + if (loadBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + load_ = value; + onChanged(); + } else { + loadBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setLoad( + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) { + if (loadBuilder_ == null) { + load_ = builderForValue.build(); + onChanged(); + } else { + loadBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { + if (loadBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + load_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) { + load_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(load_).mergeFrom(value).buildPartial(); + } else { + load_ = value; + } + onChanged(); + } else { + loadBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearLoad() { + if (loadBuilder_ == null) { + load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); + onChanged(); + } else { + loadBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getLoadBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getLoadFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() { + if (loadBuilder_ != null) { + return loadBuilder_.getMessageOrBuilder(); + } else { + return load_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> + getLoadFieldBuilder() { + if (loadBuilder_ == null) { + loadBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder>( + load_, + getParentForChildren(), + isClean()); + load_ = null; + } + return loadBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RegionServerReportRequest) + } + + static { + defaultInstance = new RegionServerReportRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionServerReportRequest) + } + + public interface RegionServerReportResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class RegionServerReportResponse extends + com.google.protobuf.GeneratedMessage + implements RegionServerReportResponseOrBuilder { + // Use RegionServerReportResponse.newBuilder() to construct. 
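+ // Not generated output: this message declares no fields, so a hedged
+ // sketch of consuming it needs only the generated parse helpers.
+ // `in` is a hypothetical java.io.InputStream carrying delimited messages.
+ //
+ //   RegionServerReportResponse ack = RegionServerReportResponse.parseDelimitedFrom(in);
+ //   if (ack == null) { /* stream ended before a delimited message was read */ }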
+ private RegionServerReportResponse(Builder builder) { + super(builder); + } + private RegionServerReportResponse(boolean noInit) {} + + private static final RegionServerReportResponse defaultInstance; + public static RegionServerReportResponse getDefaultInstance() { + return defaultInstance; + } + + public RegionServerReportResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + 
extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:RegionServerReportResponse) + } + + static { + defaultInstance = new RegionServerReportResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionServerReportResponse) + } + + public interface ReportRSFatalErrorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName server = 1; + boolean hasServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); + + // required string errorMessage = 2; + boolean hasErrorMessage(); + String getErrorMessage(); + } + public static final class ReportRSFatalErrorRequest extends + com.google.protobuf.GeneratedMessage + implements ReportRSFatalErrorRequestOrBuilder { + // Use ReportRSFatalErrorRequest.newBuilder() to construct. + private ReportRSFatalErrorRequest(Builder builder) { + super(builder); + } + private ReportRSFatalErrorRequest(boolean noInit) {} + + private static final ReportRSFatalErrorRequest defaultInstance; + public static ReportRSFatalErrorRequest getDefaultInstance() { + return defaultInstance; + } + + public ReportRSFatalErrorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName server = 1; + public static final int SERVER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + return server_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + return server_; + } + + // required string errorMessage = 2; + public static final int ERRORMESSAGE_FIELD_NUMBER = 2; + private java.lang.Object errorMessage_; + public boolean hasErrorMessage() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getErrorMessage() { + java.lang.Object ref = errorMessage_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + errorMessage_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getErrorMessageBytes() { + java.lang.Object ref = errorMessage_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + errorMessage_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + errorMessage_ = ""; + } + private byte 
memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServer()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasErrorMessage()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServer().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, server_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getErrorMessageBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, server_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getErrorMessageBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) obj; + + boolean result = true; + result = result && (hasServer() == other.hasServer()); + if (hasServer()) { + result = result && getServer() + .equals(other.getServer()); + } + result = result && (hasErrorMessage() == other.hasErrorMessage()); + if (hasErrorMessage()) { + result = result && getErrorMessage() + .equals(other.getErrorMessage()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServer()) { + hash = (37 * hash) + SERVER_FIELD_NUMBER; + hash = (53 * hash) + getServer().hashCode(); + } + if (hasErrorMessage()) { + hash = (37 * hash) + ERRORMESSAGE_FIELD_NUMBER; + hash = (53 * hash) + getErrorMessage().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + 
.buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + errorMessage_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverBuilder_ == null) { + result.server_ = server_; + } else { + result.server_ = serverBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.errorMessage_ = errorMessage_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)other); + } 
else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance()) return this; + if (other.hasServer()) { + mergeServer(other.getServer()); + } + if (other.hasErrorMessage()) { + setErrorMessage(other.getErrorMessage()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServer()) { + + return false; + } + if (!hasErrorMessage()) { + + return false; + } + if (!getServer().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServer()) { + subBuilder.mergeFrom(getServer()); + } + input.readMessage(subBuilder, extensionRegistry); + setServer(subBuilder.buildPartial()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + errorMessage_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName server = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + if (serverBuilder_ == null) { + return server_; + } else { + return serverBuilder_.getMessage(); + } + } + public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + server_ = value; + onChanged(); + } else { + serverBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServer( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverBuilder_ == null) { + server_ = builderForValue.build(); + onChanged(); + } else { + serverBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + server_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); + } else { + server_ = value; + } + onChanged(); + } else { + serverBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServer() { + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + if (serverBuilder_ != null) { + return serverBuilder_.getMessageOrBuilder(); + } else { + return server_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerFieldBuilder() { + if (serverBuilder_ == null) { + serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + server_, + getParentForChildren(), + isClean()); + server_ = null; + } + return serverBuilder_; + } + + // required string errorMessage = 2; + private java.lang.Object errorMessage_ = ""; + public boolean hasErrorMessage() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getErrorMessage() { + java.lang.Object ref = errorMessage_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + errorMessage_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setErrorMessage(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + errorMessage_ = value; + onChanged(); + return this; + } + public Builder clearErrorMessage() { + bitField0_ = (bitField0_ & ~0x00000002); + errorMessage_ = getDefaultInstance().getErrorMessage(); + onChanged(); + return this; + } + void setErrorMessage(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + errorMessage_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:ReportRSFatalErrorRequest) + } + + static { + defaultInstance = new ReportRSFatalErrorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReportRSFatalErrorRequest) + } + + public interface ReportRSFatalErrorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ReportRSFatalErrorResponse extends + com.google.protobuf.GeneratedMessage + implements ReportRSFatalErrorResponseOrBuilder { + // Use ReportRSFatalErrorResponse.newBuilder() to construct. 
+ private ReportRSFatalErrorResponse(Builder builder) { + super(builder); + } + private ReportRSFatalErrorResponse(boolean noInit) {} + + private static final ReportRSFatalErrorResponse defaultInstance; + public static ReportRSFatalErrorResponse getDefaultInstance() { + return defaultInstance; + } + + public ReportRSFatalErrorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + 
extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ReportRSFatalErrorResponse) + } + + static { + defaultInstance = new ReportRSFatalErrorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReportRSFatalErrorResponse) + } + + public interface GetLastFlushedSequenceIdRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes regionName = 1; + boolean hasRegionName(); + com.google.protobuf.ByteString getRegionName(); + } + public static final class GetLastFlushedSequenceIdRequest extends + com.google.protobuf.GeneratedMessage + implements GetLastFlushedSequenceIdRequestOrBuilder { + // Use GetLastFlushedSequenceIdRequest.newBuilder() to construct. + private GetLastFlushedSequenceIdRequest(Builder builder) { + super(builder); + } + private GetLastFlushedSequenceIdRequest(boolean noInit) {} + + private static final GetLastFlushedSequenceIdRequest defaultInstance; + public static GetLastFlushedSequenceIdRequest getDefaultInstance() { + return defaultInstance; + } + + public GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes regionName = 1; + public static final int REGIONNAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString regionName_; + public boolean hasRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRegionName() { + return regionName_; + } + + private void initFields() { + regionName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, regionName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, regionName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) obj; + + boolean result = true; + result = result && (hasRegionName() == other.hasRegionName()); + if (hasRegionName()) { + result = result && getRegionName() + .equals(other.getRegionName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionName()) { + hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getRegionName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } 
+ } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.regionName_ = regionName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance()) return this; + if (other.hasRegionName()) { + setRegionName(other.getRegionName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + regionName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes regionName = 1; + private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRegionName() { + return regionName_; + } + public Builder setRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + regionName_ = value; + onChanged(); + return this; + } + public Builder clearRegionName() { + bitField0_ = (bitField0_ & ~0x00000001); + regionName_ = getDefaultInstance().getRegionName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetLastFlushedSequenceIdRequest) + } + + static { + 
defaultInstance = new GetLastFlushedSequenceIdRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetLastFlushedSequenceIdRequest) + } + + public interface GetLastFlushedSequenceIdResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lastFlushedSequenceId = 1; + boolean hasLastFlushedSequenceId(); + long getLastFlushedSequenceId(); + } + public static final class GetLastFlushedSequenceIdResponse extends + com.google.protobuf.GeneratedMessage + implements GetLastFlushedSequenceIdResponseOrBuilder { + // Use GetLastFlushedSequenceIdResponse.newBuilder() to construct. + private GetLastFlushedSequenceIdResponse(Builder builder) { + super(builder); + } + private GetLastFlushedSequenceIdResponse(boolean noInit) {} + + private static final GetLastFlushedSequenceIdResponse defaultInstance; + public static GetLastFlushedSequenceIdResponse getDefaultInstance() { + return defaultInstance; + } + + public GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lastFlushedSequenceId = 1; + public static final int LASTFLUSHEDSEQUENCEID_FIELD_NUMBER = 1; + private long lastFlushedSequenceId_; + public boolean hasLastFlushedSequenceId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushedSequenceId() { + return lastFlushedSequenceId_; + } + + private void initFields() { + lastFlushedSequenceId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLastFlushedSequenceId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lastFlushedSequenceId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lastFlushedSequenceId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) obj; + + boolean result = true; + result = result && (hasLastFlushedSequenceId() == other.hasLastFlushedSequenceId()); + if (hasLastFlushedSequenceId()) { + result = result && (getLastFlushedSequenceId() + == other.getLastFlushedSequenceId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLastFlushedSequenceId()) { + hash = (37 * hash) + LASTFLUSHEDSEQUENCEID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLastFlushedSequenceId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lastFlushedSequenceId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildParsed() 
+ throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lastFlushedSequenceId_ = lastFlushedSequenceId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()) return this; + if (other.hasLastFlushedSequenceId()) { + setLastFlushedSequenceId(other.getLastFlushedSequenceId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLastFlushedSequenceId()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lastFlushedSequenceId_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lastFlushedSequenceId = 1; + private long lastFlushedSequenceId_ ; + public boolean hasLastFlushedSequenceId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushedSequenceId() { + return lastFlushedSequenceId_; + } + public Builder setLastFlushedSequenceId(long value) { + bitField0_ |= 0x00000001; + lastFlushedSequenceId_ = value; + onChanged(); + return this; + } + public Builder clearLastFlushedSequenceId() { + bitField0_ = (bitField0_ & ~0x00000001); + lastFlushedSequenceId_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetLastFlushedSequenceIdResponse) + } + + static { + defaultInstance = new GetLastFlushedSequenceIdResponse(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:GetLastFlushedSequenceIdResponse) + } + + public static abstract class RegionServerStatusService + implements com.google.protobuf.Service { + protected RegionServerStatusService() {} + + public interface Interface { + public abstract void regionServerStartup( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void regionServerReport( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void reportRSFatalError( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getLastFlushedSequenceId( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new RegionServerStatusService() { + @java.lang.Override + public void regionServerStartup( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, + com.google.protobuf.RpcCallback done) { + impl.regionServerStartup(controller, request, done); + } + + @java.lang.Override + public void regionServerReport( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, + com.google.protobuf.RpcCallback done) { + impl.regionServerReport(controller, request, done); + } + + @java.lang.Override + public void reportRSFatalError( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, + com.google.protobuf.RpcCallback done) { + impl.reportRSFatalError(controller, request, done); + } + + @java.lang.Override + public void getLastFlushedSequenceId( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, + com.google.protobuf.RpcCallback done) { + impl.getLastFlushedSequenceId(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.regionServerStartup(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)request); + case 1: + return impl.regionServerReport(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)request); + case 2: + return impl.reportRSFatalError(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)request); + case 3: + return impl.getLastFlushedSequenceId(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void regionServerStartup( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void regionServerReport( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void reportRSFatalError( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getLastFlushedSequenceId( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.regionServerStartup(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.regionServerReport(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.reportRSFatalError(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.getLastFlushedSequenceId(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); + case 2: + return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void regionServerStartup( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance())); + } + + public void regionServerReport( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance())); + } + + public void reportRSFatalError( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance())); + } + + public void getLastFlushedSequenceId( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionServerStartupRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionServerStartupRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionServerStartupResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionServerStartupResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionServerReportRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionServerReportRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionServerReportResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionServerReportResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReportRSFatalErrorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReportRSFatalErrorResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetLastFlushedSequenceIdRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetLastFlushedSequenceIdResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\030RegionServerStatus.proto\032\013hbase.proto\"" + + 
"^\n\032RegionServerStartupRequest\022\014\n\004port\030\001 " + + "\002(\r\022\027\n\017serverStartCode\030\002 \002(\004\022\031\n\021serverCu" + + "rrentTime\030\003 \002(\004\"B\n\033RegionServerStartupRe" + + "sponse\022#\n\nmapEntries\030\001 \003(\0132\017.NameStringP" + + "air\"S\n\031RegionServerReportRequest\022\033\n\006serv" + + "er\030\001 \002(\0132\013.ServerName\022\031\n\004load\030\002 \001(\0132\013.Se" + + "rverLoad\"\034\n\032RegionServerReportResponse\"N" + + "\n\031ReportRSFatalErrorRequest\022\033\n\006server\030\001 " + + "\002(\0132\013.ServerName\022\024\n\014errorMessage\030\002 \002(\t\"\034", + "\n\032ReportRSFatalErrorResponse\"5\n\037GetLastF" + + "lushedSequenceIdRequest\022\022\n\nregionName\030\001 " + + "\002(\014\"A\n GetLastFlushedSequenceIdResponse\022" + + "\035\n\025lastFlushedSequenceId\030\001 \002(\0042\354\002\n\031Regio" + + "nServerStatusService\022P\n\023regionServerStar" + + "tup\022\033.RegionServerStartupRequest\032\034.Regio" + + "nServerStartupResponse\022M\n\022regionServerRe" + + "port\022\032.RegionServerReportRequest\032\033.Regio" + + "nServerReportResponse\022M\n\022reportRSFatalEr" + + "ror\022\032.ReportRSFatalErrorRequest\032\033.Report", + "RSFatalErrorResponse\022_\n\030getLastFlushedSe" + + "quenceId\022 .GetLastFlushedSequenceIdReque" + + "st\032!.GetLastFlushedSequenceIdResponseBN\n" + + "*org.apache.hadoop.hbase.protobuf.genera" + + "tedB\030RegionServerStatusProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RegionServerStartupRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RegionServerStartupRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionServerStartupRequest_descriptor, + new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); + internal_static_RegionServerStartupResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_RegionServerStartupResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionServerStartupResponse_descriptor, + new java.lang.String[] { "MapEntries", }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); + internal_static_RegionServerReportRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_RegionServerReportRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionServerReportRequest_descriptor, + new java.lang.String[] { "Server", "Load", }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class); + internal_static_RegionServerReportResponse_descriptor = + 
getDescriptor().getMessageTypes().get(3); + internal_static_RegionServerReportResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionServerReportResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class); + internal_static_ReportRSFatalErrorRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_ReportRSFatalErrorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReportRSFatalErrorRequest_descriptor, + new java.lang.String[] { "Server", "ErrorMessage", }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); + internal_static_ReportRSFatalErrorResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_ReportRSFatalErrorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReportRSFatalErrorResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class); + internal_static_GetLastFlushedSequenceIdRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetLastFlushedSequenceIdRequest_descriptor, + new java.lang.String[] { "RegionName", }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class); + internal_static_GetLastFlushedSequenceIdResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetLastFlushedSequenceIdResponse_descriptor, + new java.lang.String[] { "LastFlushedSequenceId", }, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java new file mode 100644 index 0000000..e634f17 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java @@ -0,0 +1,490 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: Tracing.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class Tracing { + private Tracing() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RPCTInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional int64 traceId = 1; + boolean hasTraceId(); + long getTraceId(); + + // optional int64 parentId = 2; + boolean hasParentId(); + long getParentId(); + } + public static final class RPCTInfo extends + com.google.protobuf.GeneratedMessage + implements RPCTInfoOrBuilder { + // Use RPCTInfo.newBuilder() to construct. + private RPCTInfo(Builder builder) { + super(builder); + } + private RPCTInfo(boolean noInit) {} + + private static final RPCTInfo defaultInstance; + public static RPCTInfo getDefaultInstance() { + return defaultInstance; + } + + public RPCTInfo getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable; + } + + private int bitField0_; + // optional int64 traceId = 1; + public static final int TRACEID_FIELD_NUMBER = 1; + private long traceId_; + public boolean hasTraceId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getTraceId() { + return traceId_; + } + + // optional int64 parentId = 2; + public static final int PARENTID_FIELD_NUMBER = 2; + private long parentId_; + public boolean hasParentId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getParentId() { + return parentId_; + } + + private void initFields() { + traceId_ = 0L; + parentId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, traceId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt64(2, parentId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, traceId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, parentId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj; + + boolean result = true; + result = result && (hasTraceId() == other.hasTraceId()); + if (hasTraceId()) { + result = result && (getTraceId() + == other.getTraceId()); + } + result = result && (hasParentId() == other.hasParentId()); + if (hasParentId()) { + result = result && (getParentId() + == other.getParentId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTraceId()) { + hash = (37 * hash) + TRACEID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTraceId()); + } + if (hasParentId()) { + hash = (37 * hash) + PARENTID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getParentId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + traceId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + parentId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() { + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.traceId_ = traceId_; + 
if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.parentId_ = parentId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this; + if (other.hasTraceId()) { + setTraceId(other.getTraceId()); + } + if (other.hasParentId()) { + setParentId(other.getParentId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + traceId_ = input.readInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + parentId_ = input.readInt64(); + break; + } + } + } + } + + private int bitField0_; + + // optional int64 traceId = 1; + private long traceId_ ; + public boolean hasTraceId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getTraceId() { + return traceId_; + } + public Builder setTraceId(long value) { + bitField0_ |= 0x00000001; + traceId_ = value; + onChanged(); + return this; + } + public Builder clearTraceId() { + bitField0_ = (bitField0_ & ~0x00000001); + traceId_ = 0L; + onChanged(); + return this; + } + + // optional int64 parentId = 2; + private long parentId_ ; + public boolean hasParentId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getParentId() { + return parentId_; + } + public Builder setParentId(long value) { + bitField0_ |= 0x00000002; + parentId_ = value; + onChanged(); + return this; + } + public Builder clearParentId() { + bitField0_ = (bitField0_ & ~0x00000002); + parentId_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RPCTInfo) + } + + static { + defaultInstance = new RPCTInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RPCTInfo) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RPCTInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RPCTInfo_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\rTracing.proto\"-\n\010RPCTInfo\022\017\n\007traceId\030\001" + + " \001(\003\022\020\n\010parentId\030\002 \001(\003B:\n*org.apache.had" 
+ + "oop.hbase.protobuf.generatedB\007TracingH\001\240" + + "\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RPCTInfo_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RPCTInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RPCTInfo_descriptor, + new java.lang.String[] { "TraceId", "ParentId", }, + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java new file mode 100644 index 0000000..93999c6 --- /dev/null +++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java @@ -0,0 +1,5082 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: ZooKeeper.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ZooKeeperProtos { + private ZooKeeperProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RootRegionServerOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName server = 1; + boolean hasServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); + } + public static final class RootRegionServer extends + com.google.protobuf.GeneratedMessage + implements RootRegionServerOrBuilder { + // Use RootRegionServer.newBuilder() to construct. 
+ private RootRegionServer(Builder builder) { + super(builder); + } + private RootRegionServer(boolean noInit) {} + + private static final RootRegionServer defaultInstance; + public static RootRegionServer getDefaultInstance() { + return defaultInstance; + } + + public RootRegionServer getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName server = 1; + public static final int SERVER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + return server_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + return server_; + } + + private void initFields() { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServer()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServer().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, server_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, server_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) obj; + + boolean result = true; + result = result && (hasServer() == other.hasServer()); + if (hasServer()) { + result = result && getServer() + .equals(other.getServer()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServer()) { + hash = (37 * hash) + 
SERVER_FIELD_NUMBER; + hash = (53 * hash) + getServer().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServerOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverBuilder_ == null) { + result.server_ = server_; + } else { + result.server_ = serverBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDefaultInstance()) return this; + if (other.hasServer()) { + mergeServer(other.getServer()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServer()) { + + return false; + } + if (!getServer().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServer()) { + subBuilder.mergeFrom(getServer()); + } + input.readMessage(subBuilder, extensionRegistry); + setServer(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName server = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; + public boolean hasServer() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { + if (serverBuilder_ == null) { + return server_; + } else { + return serverBuilder_.getMessage(); + } + } + public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + server_ = value; + onChanged(); + } else { + serverBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServer( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverBuilder_ == null) { + server_ = builderForValue.build(); + onChanged(); + } else { + serverBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + server_ = + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); + } else { + server_ = value; + } + onChanged(); + } else { + serverBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServer() { + if (serverBuilder_ == null) { + server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { + if (serverBuilder_ != null) { + return serverBuilder_.getMessageOrBuilder(); + } else { + return server_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerFieldBuilder() { + if (serverBuilder_ == null) { + serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + server_, + getParentForChildren(), + isClean()); + server_ = null; + } + return serverBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RootRegionServer) + } + + static { + defaultInstance = new RootRegionServer(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RootRegionServer) + } + + public interface MasterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName master = 1; + boolean hasMaster(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); + } + public static final class Master extends + com.google.protobuf.GeneratedMessage + implements MasterOrBuilder { + // Use Master.newBuilder() to construct. 
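+ // Usage sketch (illustrative, hand-added; not emitted by protoc): parsing the
+ // bytes held in the master znode. parseFrom() routes through buildParsed(),
+ // which throws InvalidProtocolBufferException when the required 'master'
+ // field is missing, so getMaster() is safe after a successful parse.
+ // "znodeData" is a placeholder for bytes read from ZooKeeper:
+ //   Master m = Master.parseFrom(znodeData);
+ //   HBaseProtos.ServerName active = m.getMaster();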
+ private Master(Builder builder) { + super(builder); + } + private Master(boolean noInit) {} + + private static final Master defaultInstance; + public static Master getDefaultInstance() { + return defaultInstance; + } + + public Master getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName master = 1; + public static final int MASTER_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_; + public boolean hasMaster() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { + return master_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { + return master_; + } + + private void initFields() { + master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasMaster()) { + memoizedIsInitialized = 0; + return false; + } + if (!getMaster().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, master_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, master_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) obj; + + boolean result = true; + result = result && (hasMaster() == other.hasMaster()); + if (hasMaster()) { + result = result && getMaster() + .equals(other.getMaster()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasMaster()) { + hash = (37 * hash) + MASTER_FIELD_NUMBER; + hash = (53 * hash) + getMaster().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MasterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMasterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (masterBuilder_ == null) { + master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + masterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (masterBuilder_ == null) { + result.master_ = master_; + } else { + result.master_ = masterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance()) return this; + if (other.hasMaster()) { + 
mergeMaster(other.getMaster()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasMaster()) { + + return false; + } + if (!getMaster().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasMaster()) { + subBuilder.mergeFrom(getMaster()); + } + input.readMessage(subBuilder, extensionRegistry); + setMaster(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName master = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; + public boolean hasMaster() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { + if (masterBuilder_ == null) { + return master_; + } else { + return masterBuilder_.getMessage(); + } + } + public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (masterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + master_ = value; + onChanged(); + } else { + masterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setMaster( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (masterBuilder_ == null) { + master_ = builderForValue.build(); + onChanged(); + } else { + masterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (masterBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + master_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + master_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); + } else { + master_ = value; + } + onChanged(); + } else { + masterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearMaster() { + if (masterBuilder_ == null) { + master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + masterBuilder_.clear(); + } + bitField0_ = 
(bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getMasterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { + if (masterBuilder_ != null) { + return masterBuilder_.getMessageOrBuilder(); + } else { + return master_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getMasterFieldBuilder() { + if (masterBuilder_ == null) { + masterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + master_, + getParentForChildren(), + isClean()); + master_ = null; + } + return masterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Master) + } + + static { + defaultInstance = new Master(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Master) + } + + public interface ClusterUpOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string startDate = 1; + boolean hasStartDate(); + String getStartDate(); + } + public static final class ClusterUp extends + com.google.protobuf.GeneratedMessage + implements ClusterUpOrBuilder { + // Use ClusterUp.newBuilder() to construct. + private ClusterUp(Builder builder) { + super(builder); + } + private ClusterUp(boolean noInit) {} + + private static final ClusterUp defaultInstance; + public static ClusterUp getDefaultInstance() { + return defaultInstance; + } + + public ClusterUp getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable; + } + + private int bitField0_; + // required string startDate = 1; + public static final int STARTDATE_FIELD_NUMBER = 1; + private java.lang.Object startDate_; + public boolean hasStartDate() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getStartDate() { + java.lang.Object ref = startDate_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + startDate_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getStartDateBytes() { + java.lang.Object ref = startDate_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + startDate_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + startDate_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasStartDate()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getStartDateBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getStartDateBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) obj; + + boolean result = true; + result = result && (hasStartDate() == other.hasStartDate()); + if (hasStartDate()) { + result = result && getStartDate() + .equals(other.getStartDate()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasStartDate()) { + hash = (37 * hash) + STARTDATE_FIELD_NUMBER; + hash = (53 * hash) + getStartDate().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + java.io.InputStream input, 
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + startDate_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.startDate_ = startDate_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance()) return this; + if (other.hasStartDate()) { + setStartDate(other.getStartDate()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasStartDate()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + startDate_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string startDate = 1; + private java.lang.Object startDate_ = ""; + public boolean hasStartDate() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getStartDate() { + java.lang.Object ref = startDate_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + startDate_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setStartDate(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + startDate_ = value; + onChanged(); + return this; + } + public Builder clearStartDate() { + bitField0_ = (bitField0_ & 
~0x00000001); + startDate_ = getDefaultInstance().getStartDate(); + onChanged(); + return this; + } + void setStartDate(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + startDate_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:ClusterUp) + } + + static { + defaultInstance = new ClusterUp(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ClusterUp) + } + + public interface RegionTransitionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint32 eventTypeCode = 1; + boolean hasEventTypeCode(); + int getEventTypeCode(); + + // required bytes regionName = 2; + boolean hasRegionName(); + com.google.protobuf.ByteString getRegionName(); + + // required uint64 createTime = 3; + boolean hasCreateTime(); + long getCreateTime(); + + // required .ServerName serverName = 4; + boolean hasServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); + + // optional bytes payload = 5; + boolean hasPayload(); + com.google.protobuf.ByteString getPayload(); + } + public static final class RegionTransition extends + com.google.protobuf.GeneratedMessage + implements RegionTransitionOrBuilder { + // Use RegionTransition.newBuilder() to construct. + private RegionTransition(Builder builder) { + super(builder); + } + private RegionTransition(boolean noInit) {} + + private static final RegionTransition defaultInstance; + public static RegionTransition getDefaultInstance() { + return defaultInstance; + } + + public RegionTransition getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable; + } + + private int bitField0_; + // required uint32 eventTypeCode = 1; + public static final int EVENTTYPECODE_FIELD_NUMBER = 1; + private int eventTypeCode_; + public boolean hasEventTypeCode() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getEventTypeCode() { + return eventTypeCode_; + } + + // required bytes regionName = 2; + public static final int REGIONNAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString regionName_; + public boolean hasRegionName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getRegionName() { + return regionName_; + } + + // required uint64 createTime = 3; + public static final int CREATETIME_FIELD_NUMBER = 3; + private long createTime_; + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getCreateTime() { + return createTime_; + } + + // required .ServerName serverName = 4; + public static final int SERVERNAME_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + return serverName_; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + return serverName_; + } + + // optional bytes payload = 5; + public static final int PAYLOAD_FIELD_NUMBER = 5; + private com.google.protobuf.ByteString payload_; + public boolean hasPayload() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + + private void initFields() { + eventTypeCode_ = 0; + regionName_ = com.google.protobuf.ByteString.EMPTY; + createTime_ = 0L; + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + payload_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEventTypeCode()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCreateTime()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerName()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, eventTypeCode_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, regionName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, createTime_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(4, serverName_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBytes(5, payload_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, eventTypeCode_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, regionName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, createTime_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, serverName_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, payload_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other = 
(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) obj; + + boolean result = true; + result = result && (hasEventTypeCode() == other.hasEventTypeCode()); + if (hasEventTypeCode()) { + result = result && (getEventTypeCode() + == other.getEventTypeCode()); + } + result = result && (hasRegionName() == other.hasRegionName()); + if (hasRegionName()) { + result = result && getRegionName() + .equals(other.getRegionName()); + } + result = result && (hasCreateTime() == other.hasCreateTime()); + if (hasCreateTime()) { + result = result && (getCreateTime() + == other.getCreateTime()); + } + result = result && (hasServerName() == other.hasServerName()); + if (hasServerName()) { + result = result && getServerName() + .equals(other.getServerName()); + } + result = result && (hasPayload() == other.hasPayload()); + if (hasPayload()) { + result = result && getPayload() + .equals(other.getPayload()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEventTypeCode()) { + hash = (37 * hash) + EVENTTYPECODE_FIELD_NUMBER; + hash = (53 * hash) + getEventTypeCode(); + } + if (hasRegionName()) { + hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getRegionName().hashCode(); + } + if (hasCreateTime()) { + hash = (37 * hash) + CREATETIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCreateTime()); + } + if (hasServerName()) { + hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; + hash = (53 * hash) + getServerName().hashCode(); + } + if (hasPayload()) { + hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; + hash = (53 * hash) + getPayload().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransitionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + eventTypeCode_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + regionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + createTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + payload_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public 
Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.eventTypeCode_ = eventTypeCode_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.regionName_ = regionName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.createTime_ = createTime_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + if (serverNameBuilder_ == null) { + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.payload_ = payload_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance()) return this; + if (other.hasEventTypeCode()) { + setEventTypeCode(other.getEventTypeCode()); + } + if (other.hasRegionName()) { + setRegionName(other.getRegionName()); + } + if (other.hasCreateTime()) { + setCreateTime(other.getCreateTime()); + } + if (other.hasServerName()) { + mergeServerName(other.getServerName()); + } + if (other.hasPayload()) { + setPayload(other.getPayload()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEventTypeCode()) { + + return false; + } + if (!hasRegionName()) { + + return false; + } + if (!hasCreateTime()) { + + return 
false; + } + if (!hasServerName()) { + + return false; + } + if (!getServerName().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + eventTypeCode_ = input.readUInt32(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + regionName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + createTime_ = input.readUInt64(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServerName()) { + subBuilder.mergeFrom(getServerName()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerName(subBuilder.buildPartial()); + break; + } + case 42: { + bitField0_ |= 0x00000010; + payload_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required uint32 eventTypeCode = 1; + private int eventTypeCode_ ; + public boolean hasEventTypeCode() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getEventTypeCode() { + return eventTypeCode_; + } + public Builder setEventTypeCode(int value) { + bitField0_ |= 0x00000001; + eventTypeCode_ = value; + onChanged(); + return this; + } + public Builder clearEventTypeCode() { + bitField0_ = (bitField0_ & ~0x00000001); + eventTypeCode_ = 0; + onChanged(); + return this; + } + + // required bytes regionName = 2; + private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRegionName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getRegionName() { + return regionName_; + } + public Builder setRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + regionName_ = value; + onChanged(); + return this; + } + public Builder clearRegionName() { + bitField0_ = (bitField0_ & ~0x00000002); + regionName_ = getDefaultInstance().getRegionName(); + onChanged(); + return this; + } + + // required uint64 createTime = 3; + private long createTime_ ; + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getCreateTime() { + return createTime_; + } + public Builder setCreateTime(long value) { + bitField0_ |= 0x00000004; + createTime_ = value; + onChanged(); + return this; + } + public Builder clearCreateTime() { + bitField0_ = (bitField0_ & ~0x00000004); + createTime_ = 0L; + onChanged(); + return this; + } + + // required .ServerName serverName = 4; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + if (serverNameBuilder_ == null) { + return serverName_; + } else { + return serverNameBuilder_.getMessage(); + } + } + public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverName_ = value; + onChanged(); + } else { + serverNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder setServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + serverName_ = builderForValue.build(); + onChanged(); + } else { + serverNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + serverName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); + } else { + serverName_ = value; + } + onChanged(); + } else { + serverNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getServerNameFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilder(); + } else { + return serverName_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + serverName_, + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + // optional bytes payload = 5; + private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasPayload() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public 
com.google.protobuf.ByteString getPayload() { + return payload_; + } + public Builder setPayload(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + payload_ = value; + onChanged(); + return this; + } + public Builder clearPayload() { + bitField0_ = (bitField0_ & ~0x00000010); + payload_ = getDefaultInstance().getPayload(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionTransition) + } + + static { + defaultInstance = new RegionTransition(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionTransition) + } + + public interface SplitLogTaskOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .SplitLogTask.State state = 1; + boolean hasState(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState(); + + // required .ServerName serverName = 2; + boolean hasServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); + } + public static final class SplitLogTask extends + com.google.protobuf.GeneratedMessage + implements SplitLogTaskOrBuilder { + // Use SplitLogTask.newBuilder() to construct. + private SplitLogTask(Builder builder) { + super(builder); + } + private SplitLogTask(boolean noInit) {} + + private static final SplitLogTask defaultInstance; + public static SplitLogTask getDefaultInstance() { + return defaultInstance; + } + + public SplitLogTask getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable; + } + + public enum State + implements com.google.protobuf.ProtocolMessageEnum { + UNASSIGNED(0, 0), + OWNED(1, 1), + RESIGNED(2, 2), + DONE(3, 3), + ERR(4, 4), + ; + + public static final int UNASSIGNED_VALUE = 0; + public static final int OWNED_VALUE = 1; + public static final int RESIGNED_VALUE = 2; + public static final int DONE_VALUE = 3; + public static final int ERR_VALUE = 4; + + + public final int getNumber() { return value; } + + public static State valueOf(int value) { + switch (value) { + case 0: return UNASSIGNED; + case 1: return OWNED; + case 2: return RESIGNED; + case 3: return DONE; + case 4: return ERR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(0); + } + + private static final State[] VALUES = { + UNASSIGNED, OWNED, RESIGNED, DONE, ERR, + }; + + public static State valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private State(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:SplitLogTask.State) + } + + private int bitField0_; + // required .SplitLogTask.State state = 1; + public static final int STATE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_; + public boolean hasState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { + return state_; + } + + // required .ServerName serverName = 2; + public static final int SERVERNAME_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + return serverName_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + return serverName_; + } + + private void initFields() { + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasState()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerName()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, state_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, serverName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, state_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, serverName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if 
(obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) obj; + + boolean result = true; + result = result && (hasState() == other.hasState()); + if (hasState()) { + result = result && + (getState() == other.getState()); + } + result = result && (hasServerName() == other.hasServerName()); + if (hasServerName()) { + result = result && getServerName() + .equals(other.getServerName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasState()) { + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getState()); + } + if (hasServerName()) { + hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; + hash = (53 * hash) + getServerName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + 
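+ // A null return means the stream was already at EOF (mergeDelimitedFrom
+ // returned false); malformed input instead surfaces as an
+ // InvalidProtocolBufferException from the merge/build path.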
} + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + bitField0_ = (bitField0_ & ~0x00000001); + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.state_ = state_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (serverNameBuilder_ == null) { + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance()) return this; + if (other.hasState()) { + setState(other.getState()); + } + if (other.hasServerName()) { + mergeServerName(other.getServerName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasState()) { + + return false; + } + if (!hasServerName()) { + + return false; + } + if (!getServerName().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + state_ = value; + } + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServerName()) { + subBuilder.mergeFrom(getServerName()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerName(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .SplitLogTask.State state = 1; + private 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + public boolean hasState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { + return state_; + } + public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + state_ = value; + onChanged(); + return this; + } + public Builder clearState() { + bitField0_ = (bitField0_ & ~0x00000001); + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + onChanged(); + return this; + } + + // required .ServerName serverName = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + if (serverNameBuilder_ == null) { + return serverName_; + } else { + return serverNameBuilder_.getMessage(); + } + } + public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverName_ = value; + onChanged(); + } else { + serverNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + serverName_ = builderForValue.build(); + onChanged(); + } else { + serverNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + serverName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); + } else { + serverName_ = value; + } + onChanged(); + } else { + serverNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getServerNameFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder 
getServerNameOrBuilder() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilder(); + } else { + return serverName_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + serverName_, + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + // @@protoc_insertion_point(builder_scope:SplitLogTask) + } + + static { + defaultInstance = new SplitLogTask(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitLogTask) + } + + public interface TableOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .Table.State state = 1 [default = ENABLED]; + boolean hasState(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState(); + } + public static final class Table extends + com.google.protobuf.GeneratedMessage + implements TableOrBuilder { + // Use Table.newBuilder() to construct. + private Table(Builder builder) { + super(builder); + } + private Table(boolean noInit) {} + + private static final Table defaultInstance; + public static Table getDefaultInstance() { + return defaultInstance; + } + + public Table getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable; + } + + public enum State + implements com.google.protobuf.ProtocolMessageEnum { + ENABLED(0, 0), + DISABLED(1, 1), + DISABLING(2, 2), + ENABLING(3, 3), + ; + + public static final int ENABLED_VALUE = 0; + public static final int DISABLED_VALUE = 1; + public static final int DISABLING_VALUE = 2; + public static final int ENABLING_VALUE = 3; + + + public final int getNumber() { return value; } + + public static State valueOf(int value) { + switch (value) { + case 0: return ENABLED; + case 1: return DISABLED; + case 2: return DISABLING; + case 3: return ENABLING; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor().getEnumTypes().get(0); + } + + private static final State[] VALUES = { + ENABLED, DISABLED, DISABLING, ENABLING, + }; + + public static State valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private State(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Table.State) + } + + private int bitField0_; + // required .Table.State state = 1 [default = ENABLED]; + public static final int STATE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_; + public boolean hasState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() { + return state_; + } + + private void initFields() { + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasState()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, state_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, state_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) obj; + + boolean result = true; + result = result && (hasState() == other.hasState()); + if (hasState()) { + result = result && + (getState() == other.getState()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasState()) { + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getState()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.state_ = state_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance()) return this; + if (other.hasState()) { + setState(other.getState()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasState()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
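+ // Reads one tag at a time: tag 0 signals end of input; tag 8 is field 1 as a
+ // varint (the Table.State enum). Enum numbers this build does not recognize
+ // are preserved in unknownFields rather than dropped.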
com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + state_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required .Table.State state = 1 [default = ENABLED]; + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; + public boolean hasState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() { + return state_; + } + public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + state_ = value; + onChanged(); + return this; + } + public Builder clearState() { + bitField0_ = (bitField0_ & ~0x00000001); + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Table) + } + + static { + defaultInstance = new Table(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Table) + } + + public interface ReplicationPeerOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string clusterkey = 1; + boolean hasClusterkey(); + String getClusterkey(); + } + public static final class ReplicationPeer extends + com.google.protobuf.GeneratedMessage + implements ReplicationPeerOrBuilder { + // Use ReplicationPeer.newBuilder() to construct. 
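+ // Illustrative sketch (not part of the generated file): a typical round trip
+ // for this message, e.g. when persisting a peer under a replication znode.
+ // The cluster key value and znode path below are hypothetical examples.
+ //
+ //   ReplicationPeer peer = ReplicationPeer.newBuilder()
+ //       .setClusterkey("zk1,zk2,zk3:2181:/hbase")  // required field
+ //       .build();                                  // throws if clusterkey unset
+ //   byte[] data = peer.toByteArray();              // bytes for e.g. the peer znode
+ //   ReplicationPeer back = ReplicationPeer.parseFrom(data);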
+ private ReplicationPeer(Builder builder) { + super(builder); + } + private ReplicationPeer(boolean noInit) {} + + private static final ReplicationPeer defaultInstance; + public static ReplicationPeer getDefaultInstance() { + return defaultInstance; + } + + public ReplicationPeer getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable; + } + + private int bitField0_; + // required string clusterkey = 1; + public static final int CLUSTERKEY_FIELD_NUMBER = 1; + private java.lang.Object clusterkey_; + public boolean hasClusterkey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getClusterkey() { + java.lang.Object ref = clusterkey_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + clusterkey_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getClusterkeyBytes() { + java.lang.Object ref = clusterkey_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + clusterkey_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + clusterkey_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClusterkey()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getClusterkeyBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getClusterkeyBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) obj; + + boolean result = true; + result = result && (hasClusterkey() == other.hasClusterkey()); + if (hasClusterkey()) { + result = result && getClusterkey() + .equals(other.getClusterkey()); + } + 
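+ // Field presence, field values, and unknown fields must all match for equality.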
result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasClusterkey()) { + hash = (37 * hash) + CLUSTERKEY_FIELD_NUMBER; + hash = (53 * hash) + getClusterkey().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeerOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + clusterkey_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.clusterkey_ = clusterkey_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance()) return this; + if (other.hasClusterkey()) { + setClusterkey(other.getClusterkey()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasClusterkey()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + clusterkey_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string clusterkey = 1; + private java.lang.Object clusterkey_ = ""; + public boolean hasClusterkey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getClusterkey() { + java.lang.Object ref = clusterkey_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + clusterkey_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setClusterkey(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + clusterkey_ = value; + onChanged(); + return this; + } + public Builder clearClusterkey() { + bitField0_ = (bitField0_ & ~0x00000001); + clusterkey_ = getDefaultInstance().getClusterkey(); + onChanged(); + return this; + } + void setClusterkey(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + clusterkey_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:ReplicationPeer) + } + + static { + defaultInstance = new ReplicationPeer(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicationPeer) + } + + public interface ReplicationStateOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ReplicationState.State state = 1; + boolean hasState(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState(); + } + public static final class ReplicationState extends + com.google.protobuf.GeneratedMessage + implements ReplicationStateOrBuilder { + // Use ReplicationState.newBuilder() to construct. 
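+ // Illustrative sketch (not part of the generated file): this message records
+ // whether replication is active, e.g. for an enable/disable toggle. The
+ // toBuilder() step assumes the same generated template as the classes above.
+ //
+ //   ReplicationState on = ReplicationState.newBuilder()
+ //       .setState(ReplicationState.State.ENABLED)
+ //       .build();
+ //   ReplicationState off = ReplicationState.parseFrom(on.toByteArray())
+ //       .toBuilder()
+ //       .setState(ReplicationState.State.DISABLED)
+ //       .build();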
+ private ReplicationState(Builder builder) { + super(builder); + } + private ReplicationState(boolean noInit) {} + + private static final ReplicationState defaultInstance; + public static ReplicationState getDefaultInstance() { + return defaultInstance; + } + + public ReplicationState getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable; + } + + public enum State + implements com.google.protobuf.ProtocolMessageEnum { + ENABLED(0, 0), + DISABLED(1, 1), + ; + + public static final int ENABLED_VALUE = 0; + public static final int DISABLED_VALUE = 1; + + + public final int getNumber() { return value; } + + public static State valueOf(int value) { + switch (value) { + case 0: return ENABLED; + case 1: return DISABLED; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor().getEnumTypes().get(0); + } + + private static final State[] VALUES = { + ENABLED, DISABLED, + }; + + public static State valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private State(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:ReplicationState.State) + } + + private int bitField0_; + // required .ReplicationState.State state = 1; + public static final int STATE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_; + public boolean hasState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { + return state_; + } + + private void initFields() { + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasState()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { 
+ getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, state_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, state_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) obj; + + boolean result = true; + result = result && (hasState() == other.hasState()); + if (hasState()) { + result = result && + (getState() == other.getState()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasState()) { + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getState()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationStateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.state_ = state_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance()) return this; + if (other.hasState()) { + setState(other.getState()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasState()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + state_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required .ReplicationState.State state = 1; + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + public boolean hasState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { + 
return state_; + } + public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + state_ = value; + onChanged(); + return this; + } + public Builder clearState() { + bitField0_ = (bitField0_ & ~0x00000001); + state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ReplicationState) + } + + static { + defaultInstance = new ReplicationState(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicationState) + } + + public interface ReplicationHLogPositionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int64 position = 1; + boolean hasPosition(); + long getPosition(); + } + public static final class ReplicationHLogPosition extends + com.google.protobuf.GeneratedMessage + implements ReplicationHLogPositionOrBuilder { + // Use ReplicationHLogPosition.newBuilder() to construct. + private ReplicationHLogPosition(Builder builder) { + super(builder); + } + private ReplicationHLogPosition(boolean noInit) {} + + private static final ReplicationHLogPosition defaultInstance; + public static ReplicationHLogPosition getDefaultInstance() { + return defaultInstance; + } + + public ReplicationHLogPosition getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable; + } + + private int bitField0_; + // required int64 position = 1; + public static final int POSITION_FIELD_NUMBER = 1; + private long position_; + public boolean hasPosition() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getPosition() { + return position_; + } + + private void initFields() { + position_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPosition()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, position_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, position_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return 
true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) obj; + + boolean result = true; + result = result && (hasPosition() == other.hasPosition()); + if (hasPosition()) { + result = result && (getPosition() + == other.getPosition()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPosition()) { + hash = (37 * hash) + POSITION_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getPosition()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( + com.google.protobuf.CodedInputStream input) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPositionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + position_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.position_ = position_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance()) return this; + if (other.hasPosition()) { + setPosition(other.getPosition()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPosition()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + position_ = input.readInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required int64 position = 1; + private long position_ ; + public boolean hasPosition() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getPosition() { + return position_; + } + public Builder setPosition(long value) { + bitField0_ |= 0x00000001; + position_ = value; + onChanged(); + return this; + } + public Builder clearPosition() { + bitField0_ = (bitField0_ & ~0x00000001); + position_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ReplicationHLogPosition) + } + + static { + defaultInstance = new ReplicationHLogPosition(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicationHLogPosition) + } + + public interface ReplicationLockOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string lockOwner = 1; + boolean hasLockOwner(); + String getLockOwner(); + } + public static final class ReplicationLock extends + com.google.protobuf.GeneratedMessage + implements ReplicationLockOrBuilder { + // Use ReplicationLock.newBuilder() to construct. 
+ private ReplicationLock(Builder builder) { + super(builder); + } + private ReplicationLock(boolean noInit) {} + + private static final ReplicationLock defaultInstance; + public static ReplicationLock getDefaultInstance() { + return defaultInstance; + } + + public ReplicationLock getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable; + } + + private int bitField0_; + // required string lockOwner = 1; + public static final int LOCKOWNER_FIELD_NUMBER = 1; + private java.lang.Object lockOwner_; + public boolean hasLockOwner() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getLockOwner() { + java.lang.Object ref = lockOwner_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + lockOwner_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getLockOwnerBytes() { + java.lang.Object ref = lockOwner_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + lockOwner_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + lockOwner_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLockOwner()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getLockOwnerBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getLockOwnerBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) obj; + + boolean result = true; + result = result && (hasLockOwner() == other.hasLockOwner()); + if (hasLockOwner()) { + result = result && getLockOwner() + .equals(other.getLockOwner()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLockOwner()) { + hash = (37 * hash) + LOCKOWNER_FIELD_NUMBER; + hash = (53 * hash) + getLockOwner().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLockOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lockOwner_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock build() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lockOwner_ = lockOwner_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance()) return this; + if (other.hasLockOwner()) { + setLockOwner(other.getLockOwner()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLockOwner()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + lockOwner_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string lockOwner = 1; + private java.lang.Object lockOwner_ = ""; + public boolean hasLockOwner() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getLockOwner() { + java.lang.Object ref = lockOwner_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + lockOwner_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setLockOwner(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + lockOwner_ = value; + onChanged(); + return this; + } + public Builder clearLockOwner() { + bitField0_ = (bitField0_ & ~0x00000001); + lockOwner_ = getDefaultInstance().getLockOwner(); + onChanged(); + return this; + } + void setLockOwner(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + lockOwner_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:ReplicationLock) + } + + static { + defaultInstance = new ReplicationLock(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicationLock) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RootRegionServer_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RootRegionServer_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Master_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Master_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ClusterUp_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ClusterUp_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionTransition_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionTransition_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_SplitLogTask_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitLogTask_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Table_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Table_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicationPeer_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicationPeer_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicationState_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicationState_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicationHLogPosition_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicationHLogPosition_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicationLock_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicationLock_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\017ZooKeeper.proto\032\013hbase.proto\"/\n\020RootRe" + + "gionServer\022\033\n\006server\030\001 \002(\0132\013.ServerName\"" + + "%\n\006Master\022\033\n\006master\030\001 \002(\0132\013.ServerName\"\036" + + "\n\tClusterUp\022\021\n\tstartDate\030\001 \002(\t\"\203\001\n\020Regio" + + "nTransition\022\025\n\reventTypeCode\030\001 \002(\r\022\022\n\nre" + + "gionName\030\002 \002(\014\022\022\n\ncreateTime\030\003 \002(\004\022\037\n\nse" + + "rverName\030\004 \002(\0132\013.ServerName\022\017\n\007payload\030\005" + + " \001(\014\"\230\001\n\014SplitLogTask\022\"\n\005state\030\001 \002(\0162\023.S" + + "plitLogTask.State\022\037\n\nserverName\030\002 \002(\0132\013." 
+ + "ServerName\"C\n\005State\022\016\n\nUNASSIGNED\020\000\022\t\n\005O", + "WNED\020\001\022\014\n\010RESIGNED\020\002\022\010\n\004DONE\020\003\022\007\n\003ERR\020\004\"" + + "n\n\005Table\022$\n\005state\030\001 \002(\0162\014.Table.State:\007E" + + "NABLED\"?\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLED" + + "\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLING\020\003\"%\n\017Repli" + + "cationPeer\022\022\n\nclusterkey\030\001 \002(\t\"^\n\020Replic" + + "ationState\022&\n\005state\030\001 \002(\0162\027.ReplicationS" + + "tate.State\"\"\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISA" + + "BLED\020\001\"+\n\027ReplicationHLogPosition\022\020\n\010pos" + + "ition\030\001 \002(\003\"$\n\017ReplicationLock\022\021\n\tlockOw" + + "ner\030\001 \002(\tBE\n*org.apache.hadoop.hbase.pro", + "tobuf.generatedB\017ZooKeeperProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RootRegionServer_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RootRegionServer_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RootRegionServer_descriptor, + new java.lang.String[] { "Server", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.Builder.class); + internal_static_Master_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Master_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Master_descriptor, + new java.lang.String[] { "Master", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class); + internal_static_ClusterUp_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_ClusterUp_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ClusterUp_descriptor, + new java.lang.String[] { "StartDate", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); + internal_static_RegionTransition_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_RegionTransition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionTransition_descriptor, + new java.lang.String[] { "EventTypeCode", "RegionName", "CreateTime", "ServerName", "Payload", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class); + internal_static_SplitLogTask_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_SplitLogTask_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitLogTask_descriptor, + new java.lang.String[] { "State", "ServerName", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, + 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); + internal_static_Table_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_Table_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Table_descriptor, + new java.lang.String[] { "State", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class); + internal_static_ReplicationPeer_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_ReplicationPeer_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicationPeer_descriptor, + new java.lang.String[] { "Clusterkey", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class); + internal_static_ReplicationState_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_ReplicationState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicationState_descriptor, + new java.lang.String[] { "State", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class); + internal_static_ReplicationHLogPosition_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_ReplicationHLogPosition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicationHLogPosition_descriptor, + new java.lang.String[] { "Position", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class); + internal_static_ReplicationLock_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_ReplicationLock_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicationLock_descriptor, + new java.lang.String[] { "LockOwner", }, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, + org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-rpc/src/main/protobuf/AccessControl.proto hbase-rpc/src/main/protobuf/AccessControl.proto new file mode 100644 index 0000000..ea77282 --- /dev/null +++ hbase-rpc/src/main/protobuf/AccessControl.proto @@ -0,0 +1,99 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "AccessControlProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message Permission { + enum Action { + READ = 0; + WRITE = 1; + EXEC = 2; + CREATE = 3; + ADMIN = 4; + } + repeated Action action = 1; + optional bytes table = 2; + optional bytes family = 3; + optional bytes qualifier = 4; +} + +message UserPermission { + required bytes user = 1; + required Permission permission = 2; +} + +/** + * Content of the /hbase/acl/ znode. + */ +message UserTablePermissions { + message UserPermissions { + required bytes user = 1; + repeated Permission permissions = 2; + } + + repeated UserPermissions permissions = 1; +} + +message GrantRequest { + required UserPermission permission = 1; +} + +message GrantResponse { +} + +message RevokeRequest { + required UserPermission permission = 1; + +} + +message RevokeResponse { +} + + +message UserPermissionsRequest { + required bytes table = 1; +} + +message UserPermissionsResponse { + repeated UserPermission permission = 1; +} + +message CheckPermissionsRequest { + repeated Permission permission = 1; +} + +message CheckPermissionsResponse { +} + +service AccessControlService { + rpc grant(GrantRequest) + returns (GrantResponse); + + rpc revoke(RevokeRequest) + returns (RevokeResponse); + + rpc getUserPermissions(UserPermissionsRequest) + returns (UserPermissionsResponse); + + rpc checkPermissions(CheckPermissionsRequest) + returns (CheckPermissionsResponse); +} diff --git hbase-rpc/src/main/protobuf/Admin.proto hbase-rpc/src/main/protobuf/Admin.proto new file mode 100644 index 0000000..b7d8549 --- /dev/null +++ hbase-rpc/src/main/protobuf/Admin.proto @@ -0,0 +1,255 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for Admin service. 
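Before the Admin definitions, a quick sketch of how the generated message classes in this patch are used. Every message — the ZooKeeperProtos classes above and the classes protoc emits from the .proto files here — follows the same pattern: an immutable message object, a mutable Builder, presence bits in bitField0_, and parseFrom/toByteArray for the wire format. A minimal, illustrative round trip using ReplicationState; this is not part of the patch, and the class name ReplicationStateRoundTrip is invented for the example:

import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState;

public class ReplicationStateRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build an immutable message through its Builder. 'state' is a
    // required field, so build() would throw UninitializedMessageException
    // if setState() were never called (the isInitialized() check above).
    ReplicationState enabled = ReplicationState.newBuilder()
        .setState(ReplicationState.State.ENABLED)
        .build();

    // Serialize to the compact wire format -- the kind of byte[] HBase
    // would store in a replication peer's state znode.
    byte[] data = enabled.toByteArray();

    // Parse it back; hasState() reflects the presence bit the generated
    // code keeps in bitField0_.
    ReplicationState parsed = ReplicationState.parseFrom(data);
    assert parsed.hasState();
    assert parsed.getState() == ReplicationState.State.ENABLED;
  }
}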
+ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "AdminProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +message GetRegionInfoRequest { + required RegionSpecifier region = 1; + optional bool compactionState = 2; +} + +message GetRegionInfoResponse { + required RegionInfo regionInfo = 1; + optional CompactionState compactionState = 2; + + enum CompactionState { + NONE = 0; + MINOR = 1; + MAJOR = 2; + MAJOR_AND_MINOR = 3; + } +} + +/** + * Get a list of store files for a set of column families in a particular region. + * If no column family is specified, get the store files for all column families. + */ +message GetStoreFileRequest { + required RegionSpecifier region = 1; + repeated bytes family = 2; +} + +message GetStoreFileResponse { + repeated string storeFile = 1; +} + +message GetOnlineRegionRequest { +} + +message GetOnlineRegionResponse { + repeated RegionInfo regionInfo = 1; +} + +message OpenRegionRequest { + repeated RegionOpenInfo openInfo = 1; + + message RegionOpenInfo { + required RegionInfo region = 1; + optional uint32 versionOfOfflineNode = 2; + } +} + +message OpenRegionResponse { + repeated RegionOpeningState openingState = 1; + + enum RegionOpeningState { + OPENED = 0; + ALREADY_OPENED = 1; + FAILED_OPENING = 2; + } +} + +/** + * Closes the specified region and will use or not use ZK during the close + * according to the specified flag. + */ +message CloseRegionRequest { + required RegionSpecifier region = 1; + optional uint32 versionOfClosingNode = 2; + optional bool transitionInZK = 3 [default = true]; + optional ServerName destinationServer = 4; +} + +message CloseRegionResponse { + required bool closed = 1; +} + +/** + * Flushes the MemStore of the specified region. + *
<p>
+ * This method is synchronous. + */ +message FlushRegionRequest { + required RegionSpecifier region = 1; + optional uint64 ifOlderThanTs = 2; +} + +message FlushRegionResponse { + required uint64 lastFlushTime = 1; + optional bool flushed = 2; +} + +/** + * Splits the specified region. + *
<p>
+ * This method currently flushes the region and then forces a compaction which + * will then trigger a split. The flush is done synchronously but the + * compaction is asynchronous. + */ +message SplitRegionRequest { + required RegionSpecifier region = 1; + optional bytes splitPoint = 2; +} + +message SplitRegionResponse { +} + +/** + * Compacts the specified region. Performs a major compaction if specified. + *
+ * This method is asynchronous. + */ +message CompactRegionRequest { + required RegionSpecifier region = 1; + optional bool major = 2; + optional bytes family = 3; +} + +message CompactRegionResponse { +} + +message UUID { + required uint64 leastSigBits = 1; + required uint64 mostSigBits = 2; +} + +// Protocol buffer version of HLog +message WALEntry { + required WALKey key = 1; + required WALEdit edit = 2; + + // Protocol buffer version of HLogKey + message WALKey { + required bytes encodedRegionName = 1; + required bytes tableName = 2; + required uint64 logSequenceNumber = 3; + required uint64 writeTime = 4; + optional UUID clusterId = 5; + } + + message WALEdit { + repeated bytes keyValueBytes = 1; + repeated FamilyScope familyScope = 2; + + enum ScopeType { + REPLICATION_SCOPE_LOCAL = 0; + REPLICATION_SCOPE_GLOBAL = 1; + } + + message FamilyScope { + required bytes family = 1; + required ScopeType scopeType = 2; + } + } +} + +/** + * Replicates the given entries. The guarantee is that the given entries + * will be durable on the slave cluster if this method returns without + * any exception. + * hbase.replication has to be set to true for this to work. + */ +message ReplicateWALEntryRequest { + repeated WALEntry entry = 1; +} + +message ReplicateWALEntryResponse { +} + +message RollWALWriterRequest { +} + +message RollWALWriterResponse { + // A list of encoded name of regions to flush + repeated bytes regionToFlush = 1; +} + +message StopServerRequest { + required string reason = 1; +} + +message StopServerResponse { +} + +message GetServerInfoRequest { +} + +message ServerInfo { + required ServerName serverName = 1; + optional uint32 webuiPort = 2; +} + +message GetServerInfoResponse { + required ServerInfo serverInfo = 1; +} + +service AdminService { + rpc getRegionInfo(GetRegionInfoRequest) + returns(GetRegionInfoResponse); + + rpc getStoreFile(GetStoreFileRequest) + returns(GetStoreFileResponse); + + rpc getOnlineRegion(GetOnlineRegionRequest) + returns(GetOnlineRegionResponse); + + rpc openRegion(OpenRegionRequest) + returns(OpenRegionResponse); + + rpc closeRegion(CloseRegionRequest) + returns(CloseRegionResponse); + + rpc flushRegion(FlushRegionRequest) + returns(FlushRegionResponse); + + rpc splitRegion(SplitRegionRequest) + returns(SplitRegionResponse); + + rpc compactRegion(CompactRegionRequest) + returns(CompactRegionResponse); + + rpc replicateWALEntry(ReplicateWALEntryRequest) + returns(ReplicateWALEntryResponse); + + rpc rollWALWriter(RollWALWriterRequest) + returns(RollWALWriterResponse); + + rpc getServerInfo(GetServerInfoRequest) + returns(GetServerInfoResponse); + + rpc stopServer(StopServerRequest) + returns(StopServerResponse); +} diff --git hbase-rpc/src/main/protobuf/Aggregate.proto hbase-rpc/src/main/protobuf/Aggregate.proto new file mode 100644 index 0000000..5707e14 --- /dev/null +++ hbase-rpc/src/main/protobuf/Aggregate.proto @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
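The AdminService requests above are plain protobuf messages. As a sketch (not taken from HBase client code), a major-compaction request could be built like this; the region name is a made-up placeholder, and RegionSpecifier comes from hbase.proto later in this patch:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    public class CompactRegionExample {
      public static void main(String[] args) {
        // Identify the region by its full region name; an encoded name
        // would use ENCODED_REGION_NAME instead.
        HBaseProtos.RegionSpecifier region = HBaseProtos.RegionSpecifier.newBuilder()
            .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
            .setValue(ByteString.copyFromUtf8("mytable,,1346000000000.deadbeef."))  // hypothetical
            .build();

        // Ask for a major compaction of one family; per the comment above,
        // the compaction itself runs asynchronously.
        AdminProtos.CompactRegionRequest request =
            AdminProtos.CompactRegionRequest.newBuilder()
                .setRegion(region)
                .setMajor(true)
                .setFamily(ByteString.copyFromUtf8("cf"))  // hypothetical family
                .build();

        System.out.println(request);
      }
    }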
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "AggregateProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "Client.proto"; + +message AggregateArgument { + /** The argument passed to the AggregateService consists of three parts + * (1) the (canonical) classname of the ColumnInterpreter implementation + * (2) the Scan query + * (3) any bytes required to construct the ColumnInterpreter object + * properly + */ + required string interpreterClassName = 1; + required Scan scan = 2; + optional bytes interpreterSpecificBytes = 3; +} + +message AggregateResponse { + /** + * The AggregateService methods all have a response that either is a Pair + * or a simple object. When it is a Pair both firstPart and secondPart + * have defined values (and the secondPart is not present in the response + * when the response is not a pair). Refer to the AggregateImplementation + * class for an overview of the AggregateResponse object constructions. + */ + repeated bytes firstPart = 1; + optional bytes secondPart = 2; +} + +/** Refer to the AggregateImplementation class for an overview of the + * AggregateService method implementations and their functionality. + */ +service AggregateService { + rpc getMax (AggregateArgument) returns (AggregateResponse); + rpc getMin (AggregateArgument) returns (AggregateResponse); + rpc getSum (AggregateArgument) returns (AggregateResponse); + rpc getRowNum (AggregateArgument) returns (AggregateResponse); + rpc getAvg (AggregateArgument) returns (AggregateResponse); + rpc getStd (AggregateArgument) returns (AggregateResponse); + rpc getMedian (AggregateArgument) returns (AggregateResponse); +} \ No newline at end of file diff --git hbase-rpc/src/main/protobuf/BulkDelete.proto hbase-rpc/src/main/protobuf/BulkDelete.proto new file mode 100644 index 0000000..a0e56dd --- /dev/null +++ hbase-rpc/src/main/protobuf/BulkDelete.proto @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
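A sketch of how a caller might fill in the AggregateArgument defined above. The interpreter named here is HBase's stock LongColumnInterpreter; the column family is invented, and whether a given deployment has the AggregateService endpoint loaded is a separate configuration concern:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class AggregateArgumentExample {
      public static void main(String[] args) {
        // Scan one column family; the named ColumnInterpreter must be
        // resolvable on the region server.
        ClientProtos.Scan scan = ClientProtos.Scan.newBuilder()
            .addColumn(ClientProtos.Column.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf")))  // hypothetical family
            .build();

        AggregateProtos.AggregateArgument argument =
            AggregateProtos.AggregateArgument.newBuilder()
                .setInterpreterClassName(
                    "org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter")
                .setScan(scan)
                .build();

        System.out.println(argument);
      }
    }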
+ */
+
+option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated";
+option java_outer_classname = "BulkDeleteProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "Client.proto";
+
+message BulkDeleteRequest {
+  required Scan scan = 1;
+  required DeleteType deleteType = 2;
+  optional uint64 timestamp = 3;
+  required uint32 rowBatchSize = 4;
+
+  enum DeleteType {
+    ROW = 0;
+    FAMILY = 1;
+    COLUMN = 2;
+    VERSION = 3;
+  }
+}
+
+message BulkDeleteResponse {
+  required uint64 rowsDeleted = 1;
+  optional uint64 versionsDeleted = 2;
+}
+
+service BulkDeleteService {
+  rpc delete(BulkDeleteRequest)
+    returns (BulkDeleteResponse);
+}
\ No newline at end of file
diff --git hbase-rpc/src/main/protobuf/Client.proto hbase-rpc/src/main/protobuf/Client.proto
new file mode 100644
index 0000000..dd34998
--- /dev/null
+++ hbase-rpc/src/main/protobuf/Client.proto
@@ -0,0 +1,385 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for Client service.
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "ClientProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "hbase.proto";
+import "Comparator.proto";
+
+/**
+ * Container for a list of column qualifier names of a family.
+ */
+message Column {
+  required bytes family = 1;
+  repeated bytes qualifier = 2;
+}
+
+/**
+ * The protocol buffer version of Get
+ */
+message Get {
+  required bytes row = 1;
+  repeated Column column = 2;
+  repeated NameBytesPair attribute = 3;
+  optional uint64 lockId = 4;
+  optional Filter filter = 5;
+  optional TimeRange timeRange = 6;
+  optional uint32 maxVersions = 7 [default = 1];
+  optional bool cacheBlocks = 8 [default = true];
+  optional uint32 storeLimit = 9;
+  optional uint32 storeOffset = 10;
+}
+
+/**
+ * For performance reasons, we don't use KeyValue
+ * here. We use the actual KeyValue bytes.
+ */
+message Result {
+  repeated bytes keyValueBytes = 1;
+}
+
+/**
+ * The get request. Perform a single Get operation.
+ * Unless existenceOnly is specified, return all the requested data
+ * for the row that matches exactly, or the one that immediately
+ * precedes it if closestRowBefore is specified.
+ *
+ * If existenceOnly is set, only the existence will be returned.
+ */
+message GetRequest {
+  required RegionSpecifier region = 1;
+  required Get get = 2;
+
+  // If the row to get doesn't exist, return the
+  // closest row before.
+  optional bool closestRowBefore = 3;
+
+  // The result isn't asked for, just check for
+  // the existence. If specified, closestRowBefore
+  // will be ignored.
+  optional bool existenceOnly = 4;
+}
+
+message GetResponse {
+  optional Result result = 1;
+
+  // used for Get to check existence only
+  optional bool exists = 2;
+}
+
+/**
+ * Condition to check if the value of a given cell (row,
+ * family, qualifier) matches a value via a given comparator.
+ *
+ * Condition is used in check and mutate operations.
+ */
+message Condition {
+  required bytes row = 1;
+  required bytes family = 2;
+  required bytes qualifier = 3;
+  required CompareType compareType = 4;
+  required Comparator comparator = 5;
+}
+
+/**
+ * A specific mutate inside a mutate request.
+ * It can be an append, increment, put or delete based
+ * on the mutate type.
+ */
+message Mutate {
+  required bytes row = 1;
+  required MutateType mutateType = 2;
+  repeated ColumnValue columnValue = 3;
+  repeated NameBytesPair attribute = 4;
+  optional uint64 timestamp = 5;
+  optional uint64 lockId = 6;
+  optional bool writeToWAL = 7 [default = true];
+
+  // For some mutates, a result may be returned, in which case
+  // a time range can be specified for a potential performance gain
+  optional TimeRange timeRange = 10;
+
+  enum MutateType {
+    APPEND = 0;
+    INCREMENT = 1;
+    PUT = 2;
+    DELETE = 3;
+  }
+
+  enum DeleteType {
+    DELETE_ONE_VERSION = 0;
+    DELETE_MULTIPLE_VERSIONS = 1;
+    DELETE_FAMILY = 2;
+  }
+
+  message ColumnValue {
+    required bytes family = 1;
+    repeated QualifierValue qualifierValue = 2;
+
+    message QualifierValue {
+      optional bytes qualifier = 1;
+      optional bytes value = 2;
+      optional uint64 timestamp = 3;
+      optional DeleteType deleteType = 4;
+    }
+  }
+}
+
+/**
+ * The mutate request. Perform a single Mutate operation.
+ *
+ * Optionally, you can specify a condition. The mutate
+ * will take place only if the condition is met. Otherwise,
+ * the mutate will be ignored. In the response result,
+ * the parameter processed is used to indicate if the mutate
+ * actually happened.
+ */
+message MutateRequest {
+  required RegionSpecifier region = 1;
+  required Mutate mutate = 2;
+  optional Condition condition = 3;
+}
+
+message MutateResponse {
+  optional Result result = 1;
+
+  // used for mutate to indicate processed only
+  optional bool processed = 2;
+}
+
+/**
+ * Instead of issuing a get against a table, you can scan it with optional
+ * filters. You can specify the row key range, time range, the
+ * columns/families to scan and so on.
+ *
+ * This scan is used the first time in a scan request. The response to
+ * the initial scan returns a scanner id, which should be used to
+ * fetch result batches later on, until the scanner is closed.
+ */
+message Scan {
+  repeated Column column = 1;
+  repeated NameBytesPair attribute = 2;
+  optional bytes startRow = 3;
+  optional bytes stopRow = 4;
+  optional Filter filter = 5;
+  optional TimeRange timeRange = 6;
+  optional uint32 maxVersions = 7 [default = 1];
+  optional bool cacheBlocks = 8 [default = true];
+  optional uint32 batchSize = 9;
+  optional uint64 maxResultSize = 10;
+  optional uint32 storeLimit = 11;
+  optional uint32 storeOffset = 12;
+}
+
+/**
+ * A scan request. Initially, it should specify a scan. Later on, you
+ * can use the scanner id returned to fetch result batches with a different
+ * scan request.
+ *
+ * The scanner will remain open if there are more results, and it's not
+ * asked to be closed explicitly.
+ *
+ * You can fetch the results and ask the scanner to be closed to save
+ * a trip if you are not interested in remaining results.
+ */
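To make the Get/GetRequest flow above concrete, here is a minimal, hypothetical sketch using the generated ClientProtos builders; the row key, family, qualifier, and encoded region name are all invented. It also round-trips the request through the wire format, as the RPC layer would:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    public class GetRequestExample {
      public static void main(String[] args) throws Exception {
        ClientProtos.Get get = ClientProtos.Get.newBuilder()
            .setRow(ByteString.copyFromUtf8("row1"))
            .addColumn(ClientProtos.Column.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf"))
                .addQualifier(ByteString.copyFromUtf8("q1")))
            .setMaxVersions(3)
            .build();

        ClientProtos.GetRequest request = ClientProtos.GetRequest.newBuilder()
            .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
                .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
                .setValue(ByteString.copyFromUtf8("deadbeef")))
            .setGet(get)
            .build();

        // Serialize and parse again, as the request would travel on the wire.
        byte[] wire = request.toByteArray();
        ClientProtos.GetRequest parsed = ClientProtos.GetRequest.parseFrom(wire);
        System.out.println(parsed.getGet().getRow().toStringUtf8());  // row1
      }
    }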
+message ScanRequest {
+  optional RegionSpecifier region = 1;
+  optional Scan scan = 2;
+  optional uint64 scannerId = 3;
+  optional uint32 numberOfRows = 4;
+  optional bool closeScanner = 5;
+  optional uint64 nextCallSeq = 6;
+}
+
+/**
+ * The scan response. If there are no more results, moreResults will
+ * be false. If it is not specified, it means there are more.
+ */
+message ScanResponse {
+  repeated Result result = 1;
+  optional uint64 scannerId = 2;
+  optional bool moreResults = 3;
+  optional uint32 ttl = 4;
+}
+
+message LockRowRequest {
+  required RegionSpecifier region = 1;
+  repeated bytes row = 2;
+}
+
+message LockRowResponse {
+  required uint64 lockId = 1;
+  optional uint32 ttl = 2;
+}
+
+message UnlockRowRequest {
+  required RegionSpecifier region = 1;
+  required uint64 lockId = 2;
+}
+
+message UnlockRowResponse {
+}
+
+/**
+ * Atomically bulk load multiple HFiles (say from different column families)
+ * into an open region.
+ */
+message BulkLoadHFileRequest {
+  required RegionSpecifier region = 1;
+  repeated FamilyPath familyPath = 2;
+  optional bool assignSeqNum = 3;
+
+  message FamilyPath {
+    required bytes family = 1;
+    required string path = 2;
+  }
+}
+
+message BulkLoadHFileResponse {
+  required bool loaded = 1;
+}
+
+/**
+ * An individual coprocessor call. You must specify the protocol,
+ * the method, and the row against which the call will be executed.
+ *
+ * You can specify the configuration settings in the property list.
+ *
+ * The parameter list has the parameters used for the method.
+ * A parameter is a pair of parameter name and the binary parameter
+ * value. The name is the parameter class name. The value is the
+ * binary format of the parameter, for example, a protocol buffer
+ * encoded value.
+ */
+message Exec {
+  required bytes row = 1;
+  required string protocolName = 2;
+  required string methodName = 3;
+  repeated NameStringPair property = 4;
+  repeated NameBytesPair parameter = 5;
+}
+
+/**
+ * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
+ * method using the registered protocol handlers.
+ * {@link CoprocessorProtocol} implementations must be registered via the
+ * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(
+ * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)}
+ * method before they are available.
+ */
+message ExecCoprocessorRequest {
+  required RegionSpecifier region = 1;
+  required Exec call = 2;
+}
+
+message ExecCoprocessorResponse {
+  required NameBytesPair value = 1;
+}
+
+message CoprocessorServiceCall {
+  required bytes row = 1;
+  required string serviceName = 2;
+  required string methodName = 3;
+  required bytes request = 4;
+}
+
+message CoprocessorServiceRequest {
+  required RegionSpecifier region = 1;
+  required CoprocessorServiceCall call = 2;
+}
+
+message CoprocessorServiceResponse {
+  required RegionSpecifier region = 1;
+  required NameBytesPair value = 2;
+}
+
+/**
+ * An action that is part of MultiRequest.
+ * This is a union type - exactly one of the fields will be set.
+ */
+message MultiAction {
+  optional Mutate mutate = 1;
+  optional Get get = 2;
+  optional Exec exec = 3;
+}
+
+/**
+ * An individual action result. The results will be in the
+ * same order as the actions in the request. If an action
+ * returns a value, it is set in the value field. If it doesn't
+ * return anything, the result will be empty. If an action
+ * fails to execute due to any exception, the exception
+ * is returned as a stringified parameter.
+ */
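As an illustration of how action results pair up with a batched request, the hypothetical sketch below (row keys, family, and values invented) packs two Puts into the MultiRequest defined a little further down; the MultiResponse then carries one ActionResult per action, in request order:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    public class MultiRequestExample {
      // Builds a PUT-type Mutate for one row (names are invented).
      static ClientProtos.Mutate put(String row, String value) {
        return ClientProtos.Mutate.newBuilder()
            .setRow(ByteString.copyFromUtf8(row))
            .setMutateType(ClientProtos.Mutate.MutateType.PUT)
            .addColumnValue(ClientProtos.Mutate.ColumnValue.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf"))
                .addQualifierValue(
                    ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder()
                        .setQualifier(ByteString.copyFromUtf8("q"))
                        .setValue(ByteString.copyFromUtf8(value))))
            .build();
      }

      public static void main(String[] args) {
        ClientProtos.MultiRequest request = ClientProtos.MultiRequest.newBuilder()
            .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
                .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
                .setValue(ByteString.copyFromUtf8("deadbeef")))
            .addAction(ClientProtos.MultiAction.newBuilder().setMutate(put("r1", "v1")))
            .addAction(ClientProtos.MultiAction.newBuilder().setMutate(put("r2", "v2")))
            .build();
        System.out.println(request.getActionCount());  // 2
      }
    }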
+message ActionResult {
+  optional NameBytesPair value = 1;
+  optional NameBytesPair exception = 2;
+}
+
+/**
+ * You can execute a list of actions on a given region in order.
+ *
+ * If it is a list of mutate actions, atomic can be set
+ * to make sure they can be processed atomically, just like
+ * RowMutations.
+ */
+message MultiRequest {
+  required RegionSpecifier region = 1;
+  repeated MultiAction action = 2;
+  optional bool atomic = 3;
+}
+
+message MultiResponse {
+  repeated ActionResult result = 1;
+}
+
+
+service ClientService {
+  rpc get(GetRequest)
+    returns(GetResponse);
+
+  rpc mutate(MutateRequest)
+    returns(MutateResponse);
+
+  rpc scan(ScanRequest)
+    returns(ScanResponse);
+
+  rpc lockRow(LockRowRequest)
+    returns(LockRowResponse);
+
+  rpc unlockRow(UnlockRowRequest)
+    returns(UnlockRowResponse);
+
+  rpc bulkLoadHFile(BulkLoadHFileRequest)
+    returns(BulkLoadHFileResponse);
+
+  rpc execCoprocessor(ExecCoprocessorRequest)
+    returns(ExecCoprocessorResponse);
+
+  rpc execService(CoprocessorServiceRequest)
+    returns(CoprocessorServiceResponse);
+
+  rpc multi(MultiRequest)
+    returns(MultiResponse);
+}
diff --git hbase-rpc/src/main/protobuf/ClusterId.proto hbase-rpc/src/main/protobuf/ClusterId.proto
new file mode 100644
index 0000000..80b7d0f
--- /dev/null
+++ hbase-rpc/src/main/protobuf/ClusterId.proto
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are shared throughout HBase
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "ClusterIdProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+/**
+ * Content of the '/hbase/hbaseid', cluster id, znode.
+ * Also the content of the ${HBASE_ROOTDIR}/hbase.id file.
+ */
+message ClusterId {
+  // This is the cluster id, a uuid as a String
+  required string clusterId = 1;
+}
diff --git hbase-rpc/src/main/protobuf/ClusterStatus.proto hbase-rpc/src/main/protobuf/ClusterStatus.proto
new file mode 100644
index 0000000..73f20f5
--- /dev/null
+++ hbase-rpc/src/main/protobuf/ClusterStatus.proto
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for ClusterStatus
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "ClusterStatusProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "hbase.proto";
+import "ClusterId.proto";
+import "FS.proto";
+
+message RegionState {
+  required RegionInfo regionInfo = 1;
+  required State state = 2;
+  optional uint64 stamp = 3;
+  enum State {
+    OFFLINE = 0;        // region is in an offline state
+    PENDING_OPEN = 1;   // sent rpc to server to open but has not begun
+    OPENING = 2;        // server has begun to open but not yet done
+    OPEN = 3;           // server opened region and updated meta
+    PENDING_CLOSE = 4;  // sent rpc to server to close but has not begun
+    CLOSING = 5;        // server has begun to close but not yet done
+    CLOSED = 6;         // server closed region and updated meta
+    SPLITTING = 7;      // server started split of a region
+    SPLIT = 8;          // server completed split of a region
+  }
+}
+
+message RegionInTransition {
+  required RegionSpecifier spec = 1;
+  required RegionState regionState = 2;
+}
+
+message LiveServerInfo {
+  required ServerName server = 1;
+  required ServerLoad serverLoad = 2;
+}
+
+message ClusterStatus {
+  optional HBaseVersionFileContent hbaseVersion = 1;
+  repeated LiveServerInfo liveServers = 2;
+  repeated ServerName deadServers = 3;
+  repeated RegionInTransition regionsInTransition = 4;
+  optional ClusterId clusterId = 5;
+  repeated Coprocessor masterCoprocessors = 6;
+  optional ServerName master = 7;
+  repeated ServerName backupMasters = 8;
+  optional bool balancerOn = 9;
+}
diff --git hbase-rpc/src/main/protobuf/Comparator.proto hbase-rpc/src/main/protobuf/Comparator.proto
new file mode 100644
index 0000000..d4c1682
--- /dev/null
+++ hbase-rpc/src/main/protobuf/Comparator.proto
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "ComparatorProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+// This file contains protocol buffers that are used for comparators (e.g.
in filters) + +message Comparator { + required string name = 1; + optional bytes serializedComparator = 2; +} + +message ByteArrayComparable { + optional bytes value = 1; +} + +message BinaryComparator { + required ByteArrayComparable comparable = 1; +} + +message BinaryPrefixComparator { + required ByteArrayComparable comparable = 1; +} + +message BitComparator { + required ByteArrayComparable comparable = 1; + required BitwiseOp bitwiseOp = 2; + + enum BitwiseOp { + AND = 1; + OR = 2; + XOR = 3; + } +} + +message NullComparator { +} + +message RegexStringComparator { + required string pattern = 1; + required int32 patternFlags = 2; + required string charset = 3; +} + +message SubstringComparator { + required string substr = 1; +} diff --git hbase-rpc/src/main/protobuf/Examples.proto hbase-rpc/src/main/protobuf/Examples.proto new file mode 100644 index 0000000..24fbad6 --- /dev/null +++ hbase-rpc/src/main/protobuf/Examples.proto @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated"; +option java_outer_classname = "ExampleProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message CountRequest { +} + +message CountResponse { + required int64 count = 1 [default = 0]; +} + +service RowCountService { + rpc getRowCount(CountRequest) + returns (CountResponse); + rpc getKeyValueCount(CountRequest) + returns (CountResponse); +} \ No newline at end of file diff --git hbase-rpc/src/main/protobuf/FS.proto hbase-rpc/src/main/protobuf/FS.proto new file mode 100644 index 0000000..ca819e3 --- /dev/null +++ hbase-rpc/src/main/protobuf/FS.proto @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
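A brief sketch of how the comparator messages above nest: a concrete comparator (here BinaryComparator) serializes into the generic Comparator envelope together with its class name. The class name is HBase's actual BinaryComparator filter class; the compared bytes are invented:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;

    public class ComparatorExample {
      public static void main(String[] args) {
        // A BinaryComparator matching the exact bytes "abc".
        ComparatorProtos.BinaryComparator binary =
            ComparatorProtos.BinaryComparator.newBuilder()
                .setComparable(ComparatorProtos.ByteArrayComparable.newBuilder()
                    .setValue(ByteString.copyFromUtf8("abc")))
                .build();

        // The generic envelope carries the concrete comparator's class name
        // plus its serialized form.
        ComparatorProtos.Comparator comparator = ComparatorProtos.Comparator.newBuilder()
            .setName("org.apache.hadoop.hbase.filter.BinaryComparator")
            .setSerializedComparator(binary.toByteString())
            .build();

        System.out.println(comparator.getName());
      }
    }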
+ */ + +// This file contains protocol buffers that are written into the filesystem + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "FSProtos"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +/** + * The ${HBASE_ROOTDIR}/hbase.version file content + */ +message HBaseVersionFileContent { + required string version = 1; +} + +/** + * Reference file content used when we split an hfile under a region. + */ +message Reference { + required bytes splitkey = 1; + enum Range { + TOP = 0; + BOTTOM = 1; + } + required Range range = 2; +} + diff --git hbase-rpc/src/main/protobuf/Filter.proto hbase-rpc/src/main/protobuf/Filter.proto new file mode 100644 index 0000000..de79106 --- /dev/null +++ hbase-rpc/src/main/protobuf/Filter.proto @@ -0,0 +1,152 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for filters + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "FilterProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; +import "Comparator.proto"; + +message ColumnCountGetFilter { + required int32 limit = 1; +} + +message ColumnPaginationFilter { + required int32 limit = 1; + optional int32 offset = 2; +} + +message ColumnPrefixFilter { + required bytes prefix = 1; +} + +message ColumnRangeFilter { + optional bytes minColumn = 1; + optional bool minColumnInclusive = 2; + optional bytes maxColumn = 3; + optional bool maxColumnInclusive = 4; +} + +message CompareFilter { + required CompareType compareOp = 1; + optional Comparator comparator = 2; +} + +message DependentColumnFilter { + required CompareFilter compareFilter = 1; + optional bytes columnFamily = 2; + optional bytes columnQualifier = 3; + optional bool dropDependentColumn = 4; +} + +message FamilyFilter { + required CompareFilter compareFilter = 1; +} + +message FilterList { + required Operator operator = 1; + repeated Filter filters = 2; + + enum Operator { + MUST_PASS_ALL = 1; + MUST_PASS_ONE = 2; + } +} + +message FilterWrapper { + required Filter filter = 1; +} + +message FirstKeyOnlyFilter { +} + +message FirstKeyValueMatchingQualifiersFilter { + repeated bytes qualifiers = 1; +} + +message FuzzyRowFilter { + repeated BytesBytesPair fuzzyKeysData = 1; +} + +message InclusiveStopFilter { + optional bytes stopRowKey = 1; +} + +message KeyOnlyFilter { + required bool lenAsVal = 1; +} + +message MultipleColumnPrefixFilter { + repeated bytes sortedPrefixes = 1; +} + +message PageFilter { + required int64 pageSize = 1; +} + +message PrefixFilter { + optional bytes prefix = 1; +} + +message QualifierFilter { + 
required CompareFilter compareFilter = 1; +} + +message RandomRowFilter { + required float chance = 1; +} + +message RowFilter { + required CompareFilter compareFilter = 1; +} + +message SingleColumnValueExcludeFilter { + required SingleColumnValueFilter singleColumnValueFilter = 1; +} + +message SingleColumnValueFilter { + optional bytes columnFamily = 1; + optional bytes columnQualifier = 2; + required CompareType compareOp = 3; + required Comparator comparator = 4; + optional bool foundColumn = 5; + optional bool matchedColumn = 6; + optional bool filterIfMissing = 7; + optional bool latestVersionOnly = 8; +} + +message SkipFilter { + required Filter filter = 1; +} + +message TimestampsFilter { + repeated int64 timestamps = 1; +} + +message ValueFilter { + required CompareFilter compareFilter = 1; +} + +message WhileMatchFilter { + required Filter filter = 1; +} diff --git hbase-rpc/src/main/protobuf/LoadBalancer.proto hbase-rpc/src/main/protobuf/LoadBalancer.proto new file mode 100644 index 0000000..df1c049 --- /dev/null +++ hbase-rpc/src/main/protobuf/LoadBalancer.proto @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers to represent the state of the load balancer. + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "LoadBalancerProtos"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message LoadBalancerState { + optional bool balancerOn = 1; +} diff --git hbase-rpc/src/main/protobuf/Master.proto hbase-rpc/src/main/protobuf/Master.proto new file mode 100644 index 0000000..e2ff35b --- /dev/null +++ hbase-rpc/src/main/protobuf/Master.proto @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for protocols implemented by the master. 
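To show how the filter messages above are built, here is a small sketch with two of them; the prefix and paging values are invented. A concrete filter's bytes would normally travel inside the generic Filter envelope (a name plus serialized bytes) that the Scan and Get messages reference:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public class FilterExample {
      public static void main(String[] args) {
        // Keep only columns whose qualifier starts with "user_".
        FilterProtos.ColumnPrefixFilter prefix =
            FilterProtos.ColumnPrefixFilter.newBuilder()
                .setPrefix(ByteString.copyFromUtf8("user_"))
                .build();

        // Return at most 10 columns per row, skipping the first 20.
        FilterProtos.ColumnPaginationFilter page =
            FilterProtos.ColumnPaginationFilter.newBuilder()
                .setLimit(10)
                .setOffset(20)
                .build();

        System.out.println(prefix.toByteArray().length + " / "
            + page.toByteArray().length);
      }
    }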
+ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "MasterProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message IsMasterRunningRequest { +} + +message IsMasterRunningResponse { + required bool isMasterRunning = 1; +} + +service MasterService { + /** return true if master is available */ + rpc isMasterRunning(IsMasterRunningRequest) + returns(IsMasterRunningResponse); +} diff --git hbase-rpc/src/main/protobuf/MasterAdmin.proto hbase-rpc/src/main/protobuf/MasterAdmin.proto new file mode 100644 index 0000000..dc62bb4 --- /dev/null +++ hbase-rpc/src/main/protobuf/MasterAdmin.proto @@ -0,0 +1,283 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for MasterAdminProtocol. + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "MasterAdminProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; +import "Client.proto"; + +/* Column-level protobufs */ + +message AddColumnRequest { + required bytes tableName = 1; + required ColumnFamilySchema columnFamilies = 2; +} + +message AddColumnResponse { +} + +message DeleteColumnRequest { + required bytes tableName = 1; + required bytes columnName = 2; +} + +message DeleteColumnResponse { +} + +message ModifyColumnRequest { + required bytes tableName = 1; + required ColumnFamilySchema columnFamilies = 2; +} + +message ModifyColumnResponse { +} + +/* Region-level Protos */ + +message MoveRegionRequest { + required RegionSpecifier region = 1; + optional ServerName destServerName = 2; +} + +message MoveRegionResponse { +} + +message AssignRegionRequest { + required RegionSpecifier region = 1; +} + +message AssignRegionResponse { +} + +message UnassignRegionRequest { + required RegionSpecifier region = 1; + optional bool force = 2 [default = false]; +} + +message UnassignRegionResponse { +} + +message OfflineRegionRequest { + required RegionSpecifier region = 1; +} + +message OfflineRegionResponse { +} + +/* Table-level protobufs */ + +message CreateTableRequest { + required TableSchema tableSchema = 1; + repeated bytes splitKeys = 2; +} + +message CreateTableResponse { +} + +message DeleteTableRequest { + required bytes tableName = 1; +} + +message DeleteTableResponse { +} + +message EnableTableRequest { + required bytes tableName = 1; +} + +message EnableTableResponse { +} + +message DisableTableRequest { + required bytes tableName = 1; +} + +message DisableTableResponse { +} + +message ModifyTableRequest { + required bytes tableName = 1; + required TableSchema tableSchema = 2; +} + 
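A sketch of assembling one of the table-level requests above, assuming the generated MasterAdminProtos and HBaseProtos classes; the table name, family, and split key are invented:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
    import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos;

    public class CreateTableExample {
      public static void main(String[] args) {
        HBaseProtos.TableSchema schema = HBaseProtos.TableSchema.newBuilder()
            .setName(ByteString.copyFromUtf8("mytable"))
            .addColumnFamilies(HBaseProtos.ColumnFamilySchema.newBuilder()
                .setName(ByteString.copyFromUtf8("cf")))
            .build();

        MasterAdminProtos.CreateTableRequest request =
            MasterAdminProtos.CreateTableRequest.newBuilder()
                .setTableSchema(schema)
                .addSplitKeys(ByteString.copyFromUtf8("m"))  // pre-split at one key
                .build();

        System.out.println(request);
      }
    }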
+message ModifyTableResponse {
+}
+
+/* Cluster-level protobufs */
+
+
+message ShutdownRequest {
+}
+
+message ShutdownResponse {
+}
+
+message StopMasterRequest {
+}
+
+message StopMasterResponse {
+}
+
+message BalanceRequest {
+}
+
+message BalanceResponse {
+  required bool balancerRan = 1;
+}
+
+message SetBalancerRunningRequest {
+  required bool on = 1;
+  optional bool synchronous = 2;
+}
+
+message SetBalancerRunningResponse {
+  optional bool prevBalanceValue = 1;
+}
+
+message CatalogScanRequest {
+}
+
+message CatalogScanResponse {
+  optional int32 scanResult = 1;
+}
+
+message EnableCatalogJanitorRequest {
+  required bool enable = 1;
+}
+
+message EnableCatalogJanitorResponse {
+  optional bool prevValue = 1;
+}
+
+message IsCatalogJanitorEnabledRequest {
+}
+
+message IsCatalogJanitorEnabledResponse {
+  required bool value = 1;
+}
+
+service MasterAdminService {
+  /** Adds a column to the specified table. */
+  rpc addColumn(AddColumnRequest)
+    returns(AddColumnResponse);
+
+  /** Deletes a column from the specified table. Table must be disabled. */
+  rpc deleteColumn(DeleteColumnRequest)
+    returns(DeleteColumnResponse);
+
+  /** Modifies an existing column on the specified table. */
+  rpc modifyColumn(ModifyColumnRequest)
+    returns(ModifyColumnResponse);
+
+  /** Move the region to the destination server. */
+  rpc moveRegion(MoveRegionRequest)
+    returns(MoveRegionResponse);
+
+  /** Assign a region to a server chosen at random. */
+  rpc assignRegion(AssignRegionRequest)
+    returns(AssignRegionResponse);
+
+  /**
+   * Unassign a region from the current hosting regionserver. The region will
+   * then be assigned to a regionserver chosen at random. The region could be
+   * reassigned back to the same server. Use moveRegion if you want
+   * to control the region movement.
+   */
+  rpc unassignRegion(UnassignRegionRequest)
+    returns(UnassignRegionResponse);
+
+  /**
+   * Offline a region from the assignment manager's in-memory state. The
+   * region should be in a closed state and there will be no attempt to
+   * automatically reassign the region as in unassign. This is a special
+   * method, and should only be used by experts or hbck.
+   */
+  rpc offlineRegion(OfflineRegionRequest)
+    returns(OfflineRegionResponse);
+
+  /** Deletes a table */
+  rpc deleteTable(DeleteTableRequest)
+    returns(DeleteTableResponse);
+
+  /** Puts the table online (only needed if the table has been previously taken offline) */
+  rpc enableTable(EnableTableRequest)
+    returns(EnableTableResponse);
+
+  /** Take table offline */
+  rpc disableTable(DisableTableRequest)
+    returns(DisableTableResponse);
+
+  /** Modify a table's metadata */
+  rpc modifyTable(ModifyTableRequest)
+    returns(ModifyTableResponse);
+
+  /** Creates a new table asynchronously */
+  rpc createTable(CreateTableRequest)
+    returns(CreateTableResponse);
+
+  /** Shutdown an HBase cluster. */
+  rpc shutdown(ShutdownRequest)
+    returns(ShutdownResponse);
+
+  /** Stop HBase Master only. Does not shutdown the cluster. */
+  rpc stopMaster(StopMasterRequest)
+    returns(StopMasterResponse);
+
+  /**
+   * Run the balancer. If there are regions to move, it will
+   * go ahead and do the reassignments. It can NOT run for various
+   * reasons. Check logs.
+   */
+  rpc balance(BalanceRequest)
+    returns(BalanceResponse);
+
+  /**
+   * Turn the load balancer on or off.
+   * If synchronous is true, it waits for the current balance() call, if
+   * outstanding, to return.
+   */
+  rpc setBalancerRunning(SetBalancerRunningRequest)
+    returns(SetBalancerRunningResponse);
+
+  /** Trigger a run of the catalog janitor */
+  rpc runCatalogScan(CatalogScanRequest)
+    returns(CatalogScanResponse);
+
+  /**
+   * Turn the catalog janitor on or off.
+   */
+  rpc enableCatalogJanitor(EnableCatalogJanitorRequest)
+    returns(EnableCatalogJanitorResponse);
+
+  /**
+   * Query whether the catalog janitor is enabled.
+   */
+  rpc isCatalogJanitorEnabled(IsCatalogJanitorEnabledRequest)
+    returns(IsCatalogJanitorEnabledResponse);
+
+  /**
+   * Call a master coprocessor endpoint
+   */
+  rpc execMasterService(CoprocessorServiceRequest)
+    returns(CoprocessorServiceResponse);
+}
diff --git hbase-rpc/src/main/protobuf/MasterMonitor.proto hbase-rpc/src/main/protobuf/MasterMonitor.proto
new file mode 100644
index 0000000..38738e2
--- /dev/null
+++ hbase-rpc/src/main/protobuf/MasterMonitor.proto
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for MasterMonitorProtocol.
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "MasterMonitorProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "hbase.proto";
+import "ClusterStatus.proto";
+
+message GetSchemaAlterStatusRequest {
+  required bytes tableName = 1;
+}
+
+message GetSchemaAlterStatusResponse {
+  optional uint32 yetToUpdateRegions = 1;
+  optional uint32 totalRegions = 2;
+}
+
+message GetTableDescriptorsRequest {
+  repeated string tableNames = 1;
+}
+
+message GetTableDescriptorsResponse {
+  repeated TableSchema tableSchema = 1;
+}
+
+message GetClusterStatusRequest {
+}
+
+message GetClusterStatusResponse {
+  required ClusterStatus clusterStatus = 1;
+}
+
+service MasterMonitorService {
+  /** Used by the client to get the number of regions that have received the updated schema */
+  rpc getSchemaAlterStatus(GetSchemaAlterStatusRequest)
+    returns(GetSchemaAlterStatusResponse);
+
+  /** Get list of TableDescriptors for requested tables. */
+  rpc getTableDescriptors(GetTableDescriptorsRequest)
+    returns(GetTableDescriptorsResponse);
+
+  /** Return cluster status. */
+  rpc getClusterStatus(GetClusterStatusRequest)
+    returns(GetClusterStatusResponse);
+}
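As a usage sketch for the monitor protocol just defined: a GetTableDescriptorsRequest is simply a list of table names. The names below are invented, and it is an assumption here (not stated in this file) that an empty list is treated as "all tables":

    import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos;

    public class TableDescriptorsExample {
      public static void main(String[] args) {
        MasterMonitorProtos.GetTableDescriptorsRequest request =
            MasterMonitorProtos.GetTableDescriptorsRequest.newBuilder()
                .addTableNames("mytable")
                .addTableNames("othertable")
                .build();
        System.out.println(request.getTableNamesCount());  // 2
      }
    }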
diff --git hbase-rpc/src/main/protobuf/MultiRowMutation.proto hbase-rpc/src/main/protobuf/MultiRowMutation.proto
new file mode 100644
index 0000000..1aaefee
--- /dev/null
+++ hbase-rpc/src/main/protobuf/MultiRowMutation.proto
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import "Client.proto";
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "MultiRowMutation";
+option java_generate_equals_and_hash = true;
+option java_generic_services = true;
+option optimize_for = SPEED;
+
+message MultiMutateRequest {
+  repeated Mutate mutationRequest = 1;
+}
+
+message MultiMutateResponse {
+}
+
+service MultiRowMutationService {
+  rpc mutateRows(MultiMutateRequest)
+    returns(MultiMutateResponse);
+}
diff --git hbase-rpc/src/main/protobuf/README.txt hbase-rpc/src/main/protobuf/README.txt
new file mode 100644
index 0000000..f979619
--- /dev/null
+++ hbase-rpc/src/main/protobuf/README.txt
@@ -0,0 +1,27 @@
+These are the protobuf definition files used by hbase. The produced java
+classes are generated into src/main/java/org/apache/hadoop/hbase/protobuf/generated
+and then checked in. The reasoning is that they change infrequently.
+
+To regenerate the classes after making definition file changes, ensure first that
+the protobuf protoc tool is in your $PATH (You may need to download it and build
+it first; it's part of the protobuf package obtainable from here:
+http://code.google.com/p/protobuf/downloads/list). Then run the following (You
+should be able to just copy and paste the below into a terminal and hit return
+-- the protoc compiler runs fast):
+
+  UNIX_PROTO_DIR=src/main/protobuf
+  JAVA_DIR=src/main/java/
+  mkdir -p $JAVA_DIR 2> /dev/null
+  if which cygpath 2> /dev/null; then
+    PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR`
+    JAVA_DIR=`cygpath --windows $JAVA_DIR`
+  else
+    PROTO_DIR=$UNIX_PROTO_DIR
+  fi
+  for PROTO_FILE in $UNIX_PROTO_DIR/*.proto
+  do
+    protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE
+  done
+
+After you've done the above, check in the regenerated classes (or post a patch
+on a JIRA with your definition file changes and the generated files).
diff --git hbase-rpc/src/main/protobuf/RPC.proto hbase-rpc/src/main/protobuf/RPC.proto
new file mode 100644
index 0000000..bdd31d2
--- /dev/null
+++ hbase-rpc/src/main/protobuf/RPC.proto
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Specification of (unsecure) HBase RPC:
+ *
+ * A client needs to first set up a connection to a server serving a certain
+ * HBase protocol (like ClientProtocol). Once the connection is set up, the
+ * client and server communicate on that channel for RPC requests/responses.
+ * The sections below describe the flow.
+ *
+ * As part of setting up a connection to a server, the client needs to send
+ * the ConnectionHeader header. At the data level, this looks like
+ * <"hrpc"-bytearray><'5'[byte]><serialized ConnectionHeader>
+ *
+ * For every RPC that the client makes, it needs to send the following
+ * RpcRequestHeader and the RpcRequestBody. At the data level this looks like:
+ * <RpcRequestHeader><RpcRequestBody>
+ *
+ * On a success, the server's protobuf response looks like
+ * <RpcResponseHeader><RpcResponseBody>
+ * On a failure, the server's protobuf response looks like
+ * <RpcResponseHeader><RpcException>
+ *
+ * There is one special message that's sent from client to server -
+ * the Ping message. At the data level, this is just the bytes corresponding
+ * to integer -1.
+ */
+
+import "Tracing.proto";
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "RPCProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+message UserInformation {
+  required string effectiveUser = 1;
+  optional string realUser = 2;
+}
+
+message ConnectionHeader {
+  /** User Info beyond what is established at connection establishment
+   * (applies to secure HBase setup)
+   */
+  optional UserInformation userInfo = 1;
+  /** Protocol name for next rpc layer
+   * the client created a proxy with this protocol name
+   */
+  optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"];
+}
+
+
+/**
+ * The RPC request header
+ */
+message RpcRequestHeader {
+  /** Monotonically increasing callId, mostly to keep track of RPCs */
+  required uint32 callId = 1;
+  optional RPCTInfo tinfo = 2;
+}
+/**
+ * The RPC request body
+ */
+message RpcRequestBody {
+  /** Name of the RPC method */
+  required string methodName = 1;
+
+  /** protocol version of class declaring the called method */
+  optional uint64 clientProtocolVersion = 2;
+
+  /** Bytes corresponding to the client protobuf request. This is the actual
+   * bytes corresponding to the RPC request argument.
+   */
+  optional bytes request = 3;
+
+  /** Some metainfo about the request. Helps us to treat RPCs with
+   * different priorities. For now this is just the classname of the request
+   * proto object.
+   */
+  optional string requestClassName = 4;
+}
+
+/**
+ * The RPC response header
+ */
+message RpcResponseHeader {
+  /** Echo back the callId the client sent */
+  required uint32 callId = 1;
+  /** Did the RPC execution encounter an error at the server */
+  enum Status {
+    SUCCESS = 0;
+    ERROR = 1;
+    FATAL = 2;
+  }
+  required Status status = 2;
+}
+/**
+ * The RPC response body
+ */
+message RpcResponseBody {
+  /** Optional response bytes. This is the actual bytes corresponding to the
+   * return value of the invoked RPC.
+   */
+  optional bytes response = 1;
+}
+/**
+ * At the RPC layer, this message is used to indicate
+ * the server side exception to the RPC client.
+ *
+ * The HBase RPC client throws an exception indicated
+ * by exceptionName with the stackTrace.
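To illustrate the header/body pairing described at the top of this file, the sketch below frames an RpcRequestHeader and an RpcRequestBody as varint-length-delimited protobufs. Whether HBase's IPC classes use writeDelimitedTo or a hand-rolled length prefix is not specified by this file, so treat this purely as a demonstration of delimited framing:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;

    public class RpcFramingExample {
      public static void main(String[] args) throws Exception {
        RPCProtos.RpcRequestHeader header =
            RPCProtos.RpcRequestHeader.newBuilder().setCallId(1).build();
        RPCProtos.RpcRequestBody body =
            RPCProtos.RpcRequestBody.newBuilder().setMethodName("get").build();

        // One message after the other, each preceded by its varint length.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        header.writeDelimitedTo(out);
        body.writeDelimitedTo(out);

        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        RPCProtos.RpcRequestHeader h = RPCProtos.RpcRequestHeader.parseDelimitedFrom(in);
        RPCProtos.RpcRequestBody b = RPCProtos.RpcRequestBody.parseDelimitedFrom(in);
        System.out.println(h.getCallId() + " " + b.getMethodName());  // 1 get
      }
    }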
+ */ +message RpcException { + /** Class name of the exception thrown from the server */ + required string exceptionName = 1; + + /** Exception stack trace from the server side */ + optional string stackTrace = 2; +} diff --git hbase-rpc/src/main/protobuf/RegionServerStatus.proto hbase-rpc/src/main/protobuf/RegionServerStatus.proto new file mode 100644 index 0000000..37c2c63 --- /dev/null +++ hbase-rpc/src/main/protobuf/RegionServerStatus.proto @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for RegionServerStatusProtocol. + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "RegionServerStatusProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +message RegionServerStartupRequest { + /** Port number this regionserver is up on */ + required uint32 port = 1; + + /** This servers' startcode */ + required uint64 serverStartCode = 2; + + /** Current time of the region server in ms */ + required uint64 serverCurrentTime = 3; +} + +message RegionServerStartupResponse { + /** + * Configuration for the regionserver to use: e.g. filesystem, + * hbase rootdir, the hostname to use creating the RegionServer ServerName, + * etc + */ + repeated NameStringPair mapEntries = 1; +} + +message RegionServerReportRequest { + required ServerName server = 1; + + /** load the server is under */ + optional ServerLoad load = 2; +} + +message RegionServerReportResponse { +} + +message ReportRSFatalErrorRequest { + /** name of the server experiencing the error */ + required ServerName server = 1; + + /** informative text to expose in the master logs and UI */ + required string errorMessage = 2; +} + +message ReportRSFatalErrorResponse { +} + +message GetLastFlushedSequenceIdRequest { + /** region name */ + required bytes regionName = 1; +} + +message GetLastFlushedSequenceIdResponse { + /** the last HLog sequence id flushed from MemStore to HFile for the region */ + required uint64 lastFlushedSequenceId = 1; +} + +service RegionServerStatusService { + /** Called when a region server first starts. */ + rpc regionServerStartup(RegionServerStartupRequest) + returns(RegionServerStartupResponse); + + /** Called to report the load the RegionServer is under. */ + rpc regionServerReport(RegionServerReportRequest) + returns(RegionServerReportResponse); + + /** + * Called by a region server to report a fatal error that is causing it to + * abort. + */ + rpc reportRSFatalError(ReportRSFatalErrorRequest) + returns(ReportRSFatalErrorResponse); + + /** Called to get the sequence id of the last MemStore entry flushed to an + * HFile for a specified region. 
Used by the region server to speed up + * log splitting. */ + rpc getLastFlushedSequenceId(GetLastFlushedSequenceIdRequest) + returns(GetLastFlushedSequenceIdResponse); +} diff --git hbase-rpc/src/main/protobuf/Tracing.proto hbase-rpc/src/main/protobuf/Tracing.proto new file mode 100644 index 0000000..8e5babd --- /dev/null +++ hbase-rpc/src/main/protobuf/Tracing.proto @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "Tracing"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +//Used to pass through the information necessary to continue +//a trace after an RPC is made. All we need is the traceid +//(so we know the overarching trace this message is a part of), and +//the id of the current span when this message was sent, so we know +//what span caused the new span we will create when this message is received. +message RPCTInfo { + optional int64 traceId = 1; + optional int64 parentId = 2; +} diff --git hbase-rpc/src/main/protobuf/ZooKeeper.proto hbase-rpc/src/main/protobuf/ZooKeeper.proto new file mode 100644 index 0000000..69a5c01 --- /dev/null +++ hbase-rpc/src/main/protobuf/ZooKeeper.proto @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// ZNode data in hbase are serialized protobufs with a four byte +// 'magic' 'PBUF' prefix. + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "ZooKeeperProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +/** + * Content of the root-region-server znode. + */ +message RootRegionServer { + // The ServerName hosting the root region currently. + required ServerName server = 1; +} + +/** + * Content of the master znode. 
+ */ +message Master { + // The ServerName of the current Master + required ServerName master = 1; +} + +/** + * Content of the '/hbase/shutdown', cluster state, znode. + */ +message ClusterUp { + // If this znode is present, cluster is up. Currently + // the data is cluster startDate. + required string startDate = 1; +} + +/** + * What we write under unassigned up in zookeeper as a region moves through + * open/close, etc., regions. Details a region in transition. + */ +message RegionTransition { + // Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode() + required uint32 eventTypeCode = 1; + // Full regionname in bytes + required bytes regionName = 2; + required uint64 createTime = 3; + // The region server where the transition will happen or is happening + required ServerName serverName = 4; + optional bytes payload = 5; +} + +/** + * WAL SplitLog directory znodes have this for content. Used doing distributed + * WAL splitting. Holds current state and name of server that originated split. + */ +message SplitLogTask { + enum State { + UNASSIGNED = 0; + OWNED = 1; + RESIGNED = 2; + DONE = 3; + ERR = 4; + } + required State state = 1; + required ServerName serverName = 2; +} + +/** + * The znode that holds state of table. + */ +message Table { + // Table's current state + enum State { + ENABLED = 0; + DISABLED = 1; + DISABLING = 2; + ENABLING = 3; + } + // This is the table's state. If no znode for a table, + // its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class + // for more. + required State state = 1 [default = ENABLED]; +} + +/** + * Used by replication. Holds a replication peer key. + */ +message ReplicationPeer { + // clusterKey is the concatenation of the slave cluster's + // hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent + required string clusterkey = 1; +} + +/** + * Used by replication. Holds whether enabled or disabled + */ +message ReplicationState { + enum State { + ENABLED = 0; + DISABLED = 1; + } + required State state = 1; +} + +/** + * Used by replication. Holds the current position in an HLog file. + */ +message ReplicationHLogPosition { + required int64 position = 1; +} + +/** + * Used by replication. Used to lock a region server during failover. + */ +message ReplicationLock { + required string lockOwner = 1; +} diff --git hbase-rpc/src/main/protobuf/hbase.proto hbase-rpc/src/main/protobuf/hbase.proto new file mode 100644 index 0000000..f6bc51e --- /dev/null +++ hbase-rpc/src/main/protobuf/hbase.proto @@ -0,0 +1,268 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// This file contains protocol buffers that are shared throughout HBase + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "HBaseProtos"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +/** + * Table Schema + * Inspired by the rest TableSchema + */ +message TableSchema { + optional bytes name = 1; + message Attribute { + required bytes name = 1; + required bytes value = 2; + } + repeated Attribute attributes = 2; + repeated ColumnFamilySchema columnFamilies = 3; +} + +/** + * Column Family Schema + * Inspired by the rest ColumSchemaMessage + */ +message ColumnFamilySchema { + required bytes name = 1; + message Attribute { + required bytes name = 1; + required bytes value = 2; + } + repeated Attribute attributes = 2; +} + +/** + * Protocol buffer version of HRegionInfo. + */ +message RegionInfo { + required uint64 regionId = 1; + required bytes tableName = 2; + optional bytes startKey = 3; + optional bytes endKey = 4; + optional bool offline = 5; + optional bool split = 6; +} + +/** + * Container protocol buffer to specify a region. + * You can specify region by region name, or the hash + * of the region name, which is known as encoded + * region name. + */ +message RegionSpecifier { + required RegionSpecifierType type = 1; + required bytes value = 2; + + enum RegionSpecifierType { + // ,,. + REGION_NAME = 1; + + // hash of ,, + ENCODED_REGION_NAME = 2; + } +} + +message RegionLoad { + /** the region specifier */ + required RegionSpecifier regionSpecifier = 1; + + /** the number of stores for the region */ + optional uint32 stores = 2; + + /** the number of storefiles for the region */ + optional uint32 storefiles = 3; + + /** the total size of the store files for the region, uncompressed, in MB */ + optional uint32 storeUncompressedSizeMB = 4; + + /** the current total size of the store files for the region, in MB */ + optional uint32 storefileSizeMB = 5; + + /** the current size of the memstore for the region, in MB */ + optional uint32 memstoreSizeMB = 6; + + /** + * The current total size of root-level store file indexes for the region, + * in MB. The same as {@link #rootIndexSizeKB} but in MB. + */ + optional uint32 storefileIndexSizeMB = 7; + + /** the current total read requests made to region */ + optional uint64 readRequestsCount = 8; + + /** the current total write requests made to region */ + optional uint64 writeRequestsCount = 9; + + /** the total compacting key values in currently running compaction */ + optional uint64 totalCompactingKVs = 10; + + /** the completed count of key values in currently running compaction */ + optional uint64 currentCompactedKVs = 11; + + /** The current total size of root-level indexes for the region, in KB. */ + optional uint32 rootIndexSizeKB = 12; + + /** The total size of all index blocks, not just the root level, in KB. */ + optional uint32 totalStaticIndexSizeKB = 13; + + /** + * The total size of all Bloom filter blocks, not just loaded into the + * block cache, in KB. + */ + optional uint32 totalStaticBloomSizeKB = 14; + + /** Region-level coprocessors. */ + repeated Coprocessor coprocessors = 15; + + /** the most recent sequence Id from cache flush */ + optional uint64 completeSequenceId = 16; +} + +/* Server-level protobufs */ + +message ServerLoad { + /** Number of requests since last report. */ + optional uint32 numberOfRequests = 1; + + /** Total Number of requests from the start of the region server. 
*/ + optional uint32 totalNumberOfRequests = 2; + + /** the amount of used heap, in MB. */ + optional uint32 usedHeapMB = 3; + + /** the maximum allowable size of the heap, in MB. */ + optional uint32 maxHeapMB = 4; + + /** Information on the load of individual regions. */ + repeated RegionLoad regionLoads = 5; + + /** + * Regionserver-level coprocessors, e.g., WALObserver implementations. + * Region-level coprocessors, on the other hand, are stored inside RegionLoad + * objects. + */ + repeated Coprocessor coprocessors = 6; + + /** + * Time when incremental (non-total) counts began being calculated (e.g. numberOfRequests) + * time is measured as the difference, measured in milliseconds, between the current time + * and midnight, January 1, 1970 UTC. + */ + optional uint64 reportStartTime = 7; + + /** + * Time when report was generated. + * time is measured as the difference, measured in milliseconds, between the current time + * and midnight, January 1, 1970 UTC. + */ + optional uint64 reportEndTime = 8; + + /** + * The port number that this region server is hosing an info server on. + */ + optional uint32 infoServerPort = 9; +} + +/** + * A range of time. Both from and to are Java time + * stamp in milliseconds. If you don't specify a time + * range, it means all time. By default, if not + * specified, from = 0, and to = Long.MAX_VALUE + */ +message TimeRange { + optional uint64 from = 1; + optional uint64 to = 2; +} + +message Filter { + required string name = 1; + optional bytes serializedFilter = 2; +} + +/* Comparison operators */ +enum CompareType { + LESS = 0; + LESS_OR_EQUAL = 1; + EQUAL = 2; + NOT_EQUAL = 3; + GREATER_OR_EQUAL = 4; + GREATER = 5; + NO_OP = 6; +} + +/** + * The type of the key in a KeyValue. + */ +enum KeyType { + MINIMUM = 0; + PUT = 4; + + DELETE = 8; + DELETE_COLUMN = 12; + DELETE_FAMILY = 14; + + // MAXIMUM is used when searching; you look from maximum on down. + MAXIMUM = 255; +} + +/** + * Protocol buffer version of KeyValue. + * It doesn't have those transient parameters + */ +message KeyValue { + required bytes row = 1; + required bytes family = 2; + required bytes qualifier = 3; + optional uint64 timestamp = 4; + optional KeyType keyType = 5; + optional bytes value = 6; +} + +/** + * Protocol buffer version of ServerName + */ +message ServerName { + required string hostName = 1; + optional uint32 port = 2; + optional uint64 startCode = 3; +} + +// Comment data structures + +message Coprocessor { + required string name = 1; +} + +message NameStringPair { + required string name = 1; + required string value = 2; +} + +message NameBytesPair { + required string name = 1; + optional bytes value = 2; +} + +message BytesBytesPair { + required bytes first = 1; + required bytes second = 2; +} diff --git hbase-server/pom.xml hbase-server/pom.xml index 0efa8f4..e4c28f6 100644 --- hbase-server/pom.xml +++ hbase-server/pom.xml @@ -272,6 +272,10 @@ org.apache.hbase + hbase-rpc + + + org.apache.hbase hbase-common test-jar diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java deleted file mode 100644 index b246e53..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java +++ /dev/null @@ -1,6553 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: AccessControl.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class AccessControlProtos { - private AccessControlProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface PermissionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Permission.Action action = 1; - java.util.List getActionList(); - int getActionCount(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index); - - // optional bytes table = 2; - boolean hasTable(); - com.google.protobuf.ByteString getTable(); - - // optional bytes family = 3; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // optional bytes qualifier = 4; - boolean hasQualifier(); - com.google.protobuf.ByteString getQualifier(); - } - public static final class Permission extends - com.google.protobuf.GeneratedMessage - implements PermissionOrBuilder { - // Use Permission.newBuilder() to construct. - private Permission(Builder builder) { - super(builder); - } - private Permission(boolean noInit) {} - - private static final Permission defaultInstance; - public static Permission getDefaultInstance() { - return defaultInstance; - } - - public Permission getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable; - } - - public enum Action - implements com.google.protobuf.ProtocolMessageEnum { - READ(0, 0), - WRITE(1, 1), - EXEC(2, 2), - CREATE(3, 3), - ADMIN(4, 4), - ; - - public static final int READ_VALUE = 0; - public static final int WRITE_VALUE = 1; - public static final int EXEC_VALUE = 2; - public static final int CREATE_VALUE = 3; - public static final int ADMIN_VALUE = 4; - - - public final int getNumber() { return value; } - - public static Action valueOf(int value) { - switch (value) { - case 0: return READ; - case 1: return WRITE; - case 2: return EXEC; - case 3: return CREATE; - case 4: return ADMIN; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Action findValueByNumber(int number) { - return Action.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor().getEnumTypes().get(0); - } - - private static final Action[] VALUES = { - READ, WRITE, EXEC, CREATE, ADMIN, - }; - - public static Action valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor 
is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Action(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Permission.Action) - } - - private int bitField0_; - // repeated .Permission.Action action = 1; - public static final int ACTION_FIELD_NUMBER = 1; - private java.util.List action_; - public java.util.List getActionList() { - return action_; - } - public int getActionCount() { - return action_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) { - return action_.get(index); - } - - // optional bytes table = 2; - public static final int TABLE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString table_; - public boolean hasTable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTable() { - return table_; - } - - // optional bytes family = 3; - public static final int FAMILY_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // optional bytes qualifier = 4; - public static final int QUALIFIER_FIELD_NUMBER = 4; - private com.google.protobuf.ByteString qualifier_; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - private void initFields() { - action_ = java.util.Collections.emptyList(); - table_ = com.google.protobuf.ByteString.EMPTY; - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < action_.size(); i++) { - output.writeEnum(1, action_.get(i).getNumber()); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(2, table_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(3, family_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, qualifier_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < action_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeEnumSizeNoTag(action_.get(i).getNumber()); - } - size += dataSize; - size += 1 * action_.size(); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, table_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, family_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, qualifier_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long 
serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) obj; - - boolean result = true; - result = result && getActionList() - .equals(other.getActionList()); - result = result && (hasTable() == other.hasTable()); - if (hasTable()) { - result = result && getTable() - .equals(other.getTable()); - } - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getActionCount() > 0) { - hash = (37 * hash) + ACTION_FIELD_NUMBER; - hash = (53 * hash) + hashEnumList(getActionList()); - } - if (hasTable()) { - hash = (37 * hash) + TABLE_FIELD_NUMBER; - hash = (53 * hash) + getTable().hashCode(); - } - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - action_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - table_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - action_ = java.util.Collections.unmodifiableList(action_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.action_ = action_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.table_ = table_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000004; - } - result.qualifier_ = qualifier_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) return this; - if (!other.action_.isEmpty()) { - if (action_.isEmpty()) { - action_ = other.action_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureActionIsMutable(); - action_.addAll(other.action_); - } - onChanged(); - } - if (other.hasTable()) { - setTable(other.getTable()); - } - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - 
return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addAction(value); - } - break; - } - case 10: { - int length = input.readRawVarint32(); - int oldLimit = input.pushLimit(length); - while(input.getBytesUntilLimit() > 0) { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addAction(value); - } - } - input.popLimit(oldLimit); - break; - } - case 18: { - bitField0_ |= 0x00000002; - table_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - family_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - qualifier_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Permission.Action action = 1; - private java.util.List action_ = - java.util.Collections.emptyList(); - private void ensureActionIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - action_ = new java.util.ArrayList(action_); - bitField0_ |= 0x00000001; - } - } - public java.util.List getActionList() { - return java.util.Collections.unmodifiableList(action_); - } - public int getActionCount() { - return action_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) { - return action_.get(index); - } - public Builder setAction( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.set(index, value); - onChanged(); - return this; - } - public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.add(value); - onChanged(); - return this; - } - public Builder addAllAction( - java.lang.Iterable values) { - ensureActionIsMutable(); - super.addAll(values, action_); - onChanged(); - return this; - } - public Builder clearAction() { - action_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // optional bytes table = 2; - private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTable() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTable() { - return table_; - } - public Builder setTable(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - table_ = value; - onChanged(); - return this; - } - public Builder clearTable() { - bitField0_ = (bitField0_ & ~0x00000002); - table_ = getDefaultInstance().getTable(); - onChanged(); - return this; - } - - // optional bytes family = 3; - private com.google.protobuf.ByteString family_ 
= com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000004); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // optional bytes qualifier = 4; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - qualifier_ = value; - onChanged(); - return this; - } - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000008); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Permission) - } - - static { - defaultInstance = new Permission(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Permission) - } - - public interface UserPermissionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes user = 1; - boolean hasUser(); - com.google.protobuf.ByteString getUser(); - - // required .Permission permission = 2; - boolean hasPermission(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(); - } - public static final class UserPermission extends - com.google.protobuf.GeneratedMessage - implements UserPermissionOrBuilder { - // Use UserPermission.newBuilder() to construct. 
- private UserPermission(Builder builder) { - super(builder); - } - private UserPermission(boolean noInit) {} - - private static final UserPermission defaultInstance; - public static UserPermission getDefaultInstance() { - return defaultInstance; - } - - public UserPermission getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable; - } - - private int bitField0_; - // required bytes user = 1; - public static final int USER_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString user_; - public boolean hasUser() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getUser() { - return user_; - } - - // required .Permission permission = 2; - public static final int PERMISSION_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_; - public boolean hasPermission() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() { - return permission_; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() { - return permission_; - } - - private void initFields() { - user_ = com.google.protobuf.ByteString.EMPTY; - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasUser()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasPermission()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, user_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, permission_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, user_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, permission_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)) { - return super.equals(obj); - 
} - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) obj; - - boolean result = true; - result = result && (hasUser() == other.hasUser()); - if (hasUser()) { - result = result && getUser() - .equals(other.getUser()); - } - result = result && (hasPermission() == other.hasPermission()); - if (hasPermission()) { - result = result && getPermission() - .equals(other.getPermission()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasUser()) { - hash = (37 * hash) + USER_FIELD_NUMBER; - hash = (53 * hash) + getUser().hashCode(); - } - if (hasPermission()) { - hash = (37 * hash) + PERMISSION_FIELD_NUMBER; - hash = (53 * hash) + getPermission().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( 
- com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - user_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (permissionBuilder_ == null) { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); - } else { - permissionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.user_ = user_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (permissionBuilder_ == null) { - result.permission_ = permission_; - } else { - result.permission_ = permissionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) return this; - if (other.hasUser()) { - setUser(other.getUser()); - } - if (other.hasPermission()) { - mergePermission(other.getPermission()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasUser()) { - - return false; - } - if (!hasPermission()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - user_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); - if (hasPermission()) { - subBuilder.mergeFrom(getPermission()); - } - input.readMessage(subBuilder, extensionRegistry); - setPermission(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes user = 1; - private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasUser() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getUser() { - return user_; - } - public Builder setUser(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - user_ = value; - 
onChanged(); - return this; - } - public Builder clearUser() { - bitField0_ = (bitField0_ & ~0x00000001); - user_ = getDefaultInstance().getUser(); - onChanged(); - return this; - } - - // required .Permission permission = 2; - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_; - public boolean hasPermission() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() { - if (permissionBuilder_ == null) { - return permission_; - } else { - return permissionBuilder_.getMessage(); - } - } - public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - permission_ = value; - onChanged(); - } else { - permissionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setPermission( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionBuilder_ == null) { - permission_ = builderForValue.build(); - onChanged(); - } else { - permissionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) { - permission_ = - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(permission_).mergeFrom(value).buildPartial(); - } else { - permission_ = value; - } - onChanged(); - } else { - permissionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearPermission() { - if (permissionBuilder_ == null) { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance(); - onChanged(); - } else { - permissionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getPermissionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() { - if (permissionBuilder_ != null) { - return permissionBuilder_.getMessageOrBuilder(); - } else { - return permission_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> - getPermissionFieldBuilder() { - if (permissionBuilder_ == null) { - permissionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>( - permission_, - getParentForChildren(), - isClean()); - permission_ = null; - } - return permissionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:UserPermission) - } - - static { - defaultInstance = new UserPermission(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UserPermission) - } - - public interface UserTablePermissionsOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .UserTablePermissions.UserPermissions permissions = 1; - java.util.List - getPermissionsList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index); - int getPermissionsCount(); - java.util.List - getPermissionsOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( - int index); - } - public static final class UserTablePermissions extends - com.google.protobuf.GeneratedMessage - implements UserTablePermissionsOrBuilder { - // Use UserTablePermissions.newBuilder() to construct. - private UserTablePermissions(Builder builder) { - super(builder); - } - private UserTablePermissions(boolean noInit) {} - - private static final UserTablePermissions defaultInstance; - public static UserTablePermissions getDefaultInstance() { - return defaultInstance; - } - - public UserTablePermissions getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable; - } - - public interface UserPermissionsOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes user = 1; - boolean hasUser(); - com.google.protobuf.ByteString getUser(); - - // repeated .Permission permissions = 2; - java.util.List - getPermissionsList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index); - int getPermissionsCount(); - java.util.List - getPermissionsOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( - int index); - } - public static final class UserPermissions extends - com.google.protobuf.GeneratedMessage - implements UserPermissionsOrBuilder { - // Use UserPermissions.newBuilder() to construct. 
- private UserPermissions(Builder builder) { - super(builder); - } - private UserPermissions(boolean noInit) {} - - private static final UserPermissions defaultInstance; - public static UserPermissions getDefaultInstance() { - return defaultInstance; - } - - public UserPermissions getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; - } - - private int bitField0_; - // required bytes user = 1; - public static final int USER_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString user_; - public boolean hasUser() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getUser() { - return user_; - } - - // repeated .Permission permissions = 2; - public static final int PERMISSIONS_FIELD_NUMBER = 2; - private java.util.List permissions_; - public java.util.List getPermissionsList() { - return permissions_; - } - public java.util.List - getPermissionsOrBuilderList() { - return permissions_; - } - public int getPermissionsCount() { - return permissions_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) { - return permissions_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( - int index) { - return permissions_.get(index); - } - - private void initFields() { - user_ = com.google.protobuf.ByteString.EMPTY; - permissions_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasUser()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, user_); - } - for (int i = 0; i < permissions_.size(); i++) { - output.writeMessage(2, permissions_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, user_); - } - for (int i = 0; i < permissions_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, permissions_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) obj; - - boolean result = true; - result = result && (hasUser() == other.hasUser()); - if (hasUser()) { - result = result && getUser() - .equals(other.getUser()); - } - result = result && getPermissionsList() - .equals(other.getPermissionsList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasUser()) { - hash = (37 * hash) + USER_FIELD_NUMBER; - hash = (53 * hash) + getUser().hashCode(); - } - if (getPermissionsCount() > 0) { - hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER; - hash = (53 * hash) + getPermissionsList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionsFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - user_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (permissionsBuilder_ == null) { - permissions_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - permissionsBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.user_ = user_; - if (permissionsBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - permissions_ = java.util.Collections.unmodifiableList(permissions_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.permissions_ = permissions_; - } else { - result.permissions_ = permissionsBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()) return this; - if (other.hasUser()) { - setUser(other.getUser()); - } - if (permissionsBuilder_ == null) { - if (!other.permissions_.isEmpty()) { - if (permissions_.isEmpty()) { - permissions_ = other.permissions_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensurePermissionsIsMutable(); - permissions_.addAll(other.permissions_); - } - onChanged(); - } - } else { - if (!other.permissions_.isEmpty()) { - if (permissionsBuilder_.isEmpty()) { - permissionsBuilder_.dispose(); - permissionsBuilder_ = null; - permissions_ = other.permissions_; - bitField0_ = (bitField0_ & ~0x00000002); - permissionsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getPermissionsFieldBuilder() : null; - } else { - permissionsBuilder_.addAllMessages(other.permissions_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasUser()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - user_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermissions(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes user = 1; - private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasUser() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getUser() { - return user_; - } - public Builder setUser(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - user_ = value; - onChanged(); - return this; - } - public Builder clearUser() { - bitField0_ = (bitField0_ & ~0x00000001); - user_ = getDefaultInstance().getUser(); - onChanged(); - return this; - } - - // repeated .Permission permissions = 2; - private java.util.List permissions_ = - java.util.Collections.emptyList(); - private void ensurePermissionsIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - permissions_ = new java.util.ArrayList(permissions_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionsBuilder_; - - public java.util.List getPermissionsList() { - if (permissionsBuilder_ == null) { - return java.util.Collections.unmodifiableList(permissions_); - } else { - return permissionsBuilder_.getMessageList(); - } - } - public int getPermissionsCount() { - if (permissionsBuilder_ == null) { - return permissions_.size(); - } else { - return permissionsBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) { - if (permissionsBuilder_ == null) { - return permissions_.get(index); - } else { - return permissionsBuilder_.getMessage(index); - } - } - public Builder setPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionsIsMutable(); - permissions_.set(index, 
value); - onChanged(); - } else { - permissionsBuilder_.setMessage(index, value); - } - return this; - } - public Builder setPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.set(index, builderForValue.build()); - onChanged(); - } else { - permissionsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionsIsMutable(); - permissions_.add(value); - onChanged(); - } else { - permissionsBuilder_.addMessage(value); - } - return this; - } - public Builder addPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionsIsMutable(); - permissions_.add(index, value); - onChanged(); - } else { - permissionsBuilder_.addMessage(index, value); - } - return this; - } - public Builder addPermissions( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.add(builderForValue.build()); - onChanged(); - } else { - permissionsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.add(index, builderForValue.build()); - onChanged(); - } else { - permissionsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllPermissions( - java.lang.Iterable values) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - super.addAll(values, permissions_); - onChanged(); - } else { - permissionsBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearPermissions() { - if (permissionsBuilder_ == null) { - permissions_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - permissionsBuilder_.clear(); - } - return this; - } - public Builder removePermissions(int index) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.remove(index); - onChanged(); - } else { - permissionsBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionsBuilder( - int index) { - return getPermissionsFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder( - int index) { - if (permissionsBuilder_ == null) { - return permissions_.get(index); } else { - return permissionsBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getPermissionsOrBuilderList() { - if (permissionsBuilder_ != null) { - return permissionsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(permissions_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder() { 
- return getPermissionsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder( - int index) { - return getPermissionsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); - } - public java.util.List - getPermissionsBuilderList() { - return getPermissionsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> - getPermissionsFieldBuilder() { - if (permissionsBuilder_ == null) { - permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>( - permissions_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - permissions_ = null; - } - return permissionsBuilder_; - } - - // @@protoc_insertion_point(builder_scope:UserTablePermissions.UserPermissions) - } - - static { - defaultInstance = new UserPermissions(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UserTablePermissions.UserPermissions) - } - - // repeated .UserTablePermissions.UserPermissions permissions = 1; - public static final int PERMISSIONS_FIELD_NUMBER = 1; - private java.util.List permissions_; - public java.util.List getPermissionsList() { - return permissions_; - } - public java.util.List - getPermissionsOrBuilderList() { - return permissions_; - } - public int getPermissionsCount() { - return permissions_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) { - return permissions_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( - int index) { - return permissions_.get(index); - } - - private void initFields() { - permissions_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getPermissionsCount(); i++) { - if (!getPermissions(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < permissions_.size(); i++) { - output.writeMessage(1, permissions_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < permissions_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, permissions_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) obj; - - boolean result = true; - result = result && getPermissionsList() - .equals(other.getPermissionsList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getPermissionsCount() > 0) { - hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER; - hash = (53 * hash) + getPermissionsList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder 
builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissionsOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionsFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (permissionsBuilder_ == null) { - permissions_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - permissionsBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions(this); - int from_bitField0_ = bitField0_; - if (permissionsBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - permissions_ = java.util.Collections.unmodifiableList(permissions_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.permissions_ = permissions_; - } else { - result.permissions_ = permissionsBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance()) return this; - if (permissionsBuilder_ == null) { - if (!other.permissions_.isEmpty()) { - if (permissions_.isEmpty()) { - permissions_ = other.permissions_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensurePermissionsIsMutable(); - permissions_.addAll(other.permissions_); - } - onChanged(); - } - } else { - if (!other.permissions_.isEmpty()) { - if (permissionsBuilder_.isEmpty()) { - permissionsBuilder_.dispose(); - permissionsBuilder_ = null; - permissions_ = other.permissions_; - bitField0_ = (bitField0_ & ~0x00000001); - permissionsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getPermissionsFieldBuilder() : null; - } else { - permissionsBuilder_.addAllMessages(other.permissions_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getPermissionsCount(); i++) { - if (!getPermissions(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermissions(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .UserTablePermissions.UserPermissions permissions = 1; - private java.util.List permissions_ = - java.util.Collections.emptyList(); - private void ensurePermissionsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - permissions_ = new java.util.ArrayList(permissions_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder> permissionsBuilder_; - - public java.util.List getPermissionsList() { - if (permissionsBuilder_ == null) { - return java.util.Collections.unmodifiableList(permissions_); - } else { - return permissionsBuilder_.getMessageList(); - } - } - public int getPermissionsCount() { - if (permissionsBuilder_ == null) { - return permissions_.size(); - } else { - return permissionsBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) { - if (permissionsBuilder_ == null) { - return permissions_.get(index); - } else { - return permissionsBuilder_.getMessage(index); - } - } - public Builder setPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { - if (permissionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionsIsMutable(); - permissions_.set(index, value); - onChanged(); - } else { - permissionsBuilder_.setMessage(index, value); - } - return this; - } - public Builder setPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.set(index, builderForValue.build()); - onChanged(); - } else { - 
permissionsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { - if (permissionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionsIsMutable(); - permissions_.add(value); - onChanged(); - } else { - permissionsBuilder_.addMessage(value); - } - return this; - } - public Builder addPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) { - if (permissionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionsIsMutable(); - permissions_.add(index, value); - onChanged(); - } else { - permissionsBuilder_.addMessage(index, value); - } - return this; - } - public Builder addPermissions( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.add(builderForValue.build()); - onChanged(); - } else { - permissionsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addPermissions( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.add(index, builderForValue.build()); - onChanged(); - } else { - permissionsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllPermissions( - java.lang.Iterable values) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - super.addAll(values, permissions_); - onChanged(); - } else { - permissionsBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearPermissions() { - if (permissionsBuilder_ == null) { - permissions_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - permissionsBuilder_.clear(); - } - return this; - } - public Builder removePermissions(int index) { - if (permissionsBuilder_ == null) { - ensurePermissionsIsMutable(); - permissions_.remove(index); - onChanged(); - } else { - permissionsBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder getPermissionsBuilder( - int index) { - return getPermissionsFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder( - int index) { - if (permissionsBuilder_ == null) { - return permissions_.get(index); } else { - return permissionsBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getPermissionsOrBuilderList() { - if (permissionsBuilder_ != null) { - return permissionsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(permissions_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder() { - return getPermissionsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()); - } - public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder( - int index) { - return getPermissionsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()); - } - public java.util.List - getPermissionsBuilderList() { - return getPermissionsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder> - getPermissionsFieldBuilder() { - if (permissionsBuilder_ == null) { - permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>( - permissions_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - permissions_ = null; - } - return permissionsBuilder_; - } - - // @@protoc_insertion_point(builder_scope:UserTablePermissions) - } - - static { - defaultInstance = new UserTablePermissions(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UserTablePermissions) - } - - public interface GrantRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .UserPermission permission = 1; - boolean hasPermission(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(); - } - public static final class GrantRequest extends - com.google.protobuf.GeneratedMessage - implements GrantRequestOrBuilder { - // Use GrantRequest.newBuilder() to construct. 
- private GrantRequest(Builder builder) { - super(builder); - } - private GrantRequest(boolean noInit) {} - - private static final GrantRequest defaultInstance; - public static GrantRequest getDefaultInstance() { - return defaultInstance; - } - - public GrantRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .UserPermission permission = 1; - public static final int PERMISSION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_; - public boolean hasPermission() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { - return permission_; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { - return permission_; - } - - private void initFields() { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPermission()) { - memoizedIsInitialized = 0; - return false; - } - if (!getPermission().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, permission_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, permission_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) obj; - - boolean result = true; - result = result && (hasPermission() == other.hasPermission()); - if (hasPermission()) { - result = result && getPermission() - .equals(other.getPermission()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; 
- hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPermission()) { - hash = (37 * hash) + PERMISSION_FIELD_NUMBER; - hash = (53 * hash) + getPermission().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { 
return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (permissionBuilder_ == null) { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - } else { - permissionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (permissionBuilder_ == null) { - result.permission_ = permission_; - } else { - result.permission_ = permissionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message 
other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance()) return this; - if (other.hasPermission()) { - mergePermission(other.getPermission()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPermission()) { - - return false; - } - if (!getPermission().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); - if (hasPermission()) { - subBuilder.mergeFrom(getPermission()); - } - input.readMessage(subBuilder, extensionRegistry); - setPermission(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .UserPermission permission = 1; - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; - public boolean hasPermission() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { - if (permissionBuilder_ == null) { - return permission_; - } else { - return permissionBuilder_.getMessage(); - } - } - public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - permission_ = value; - onChanged(); - } else { - permissionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setPermission( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { - if (permissionBuilder_ == null) { - permission_ = builderForValue.build(); - onChanged(); - } else { - permissionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder 
mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) { - permission_ = - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(permission_).mergeFrom(value).buildPartial(); - } else { - permission_ = value; - } - onChanged(); - } else { - permissionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearPermission() { - if (permissionBuilder_ == null) { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - onChanged(); - } else { - permissionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getPermissionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { - if (permissionBuilder_ != null) { - return permissionBuilder_.getMessageOrBuilder(); - } else { - return permission_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> - getPermissionFieldBuilder() { - if (permissionBuilder_ == null) { - permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>( - permission_, - getParentForChildren(), - isClean()); - permission_ = null; - } - return permissionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GrantRequest) - } - - static { - defaultInstance = new GrantRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GrantRequest) - } - - public interface GrantResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class GrantResponse extends - com.google.protobuf.GeneratedMessage - implements GrantResponseOrBuilder { - // Use GrantResponse.newBuilder() to construct. 
- private GrantResponse(Builder builder) { - super(builder); - } - private GrantResponse(boolean noInit) {} - - private static final GrantResponse defaultInstance; - public static GrantResponse getDefaultInstance() { - return defaultInstance; - } - - public GrantResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - 
- public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:GrantResponse) - } - - static { - defaultInstance = new GrantResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GrantResponse) - } - - public interface RevokeRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .UserPermission permission = 1; - boolean hasPermission(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(); - } - public static final class RevokeRequest 
extends - com.google.protobuf.GeneratedMessage - implements RevokeRequestOrBuilder { - // Use RevokeRequest.newBuilder() to construct. - private RevokeRequest(Builder builder) { - super(builder); - } - private RevokeRequest(boolean noInit) {} - - private static final RevokeRequest defaultInstance; - public static RevokeRequest getDefaultInstance() { - return defaultInstance; - } - - public RevokeRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .UserPermission permission = 1; - public static final int PERMISSION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_; - public boolean hasPermission() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { - return permission_; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { - return permission_; - } - - private void initFields() { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPermission()) { - memoizedIsInitialized = 0; - return false; - } - if (!getPermission().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, permission_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, permission_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) obj; - - boolean result = true; - result = result && (hasPermission() == other.hasPermission()); - if (hasPermission()) { - result = result && getPermission() - .equals(other.getPermission()); - } - result = result 
&& - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPermission()) { - hash = (37 * hash) + PERMISSION_FIELD_NUMBER; - hash = (53 * hash) + getPermission().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (permissionBuilder_ == null) { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - } else { - permissionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (permissionBuilder_ == null) { - result.permission_ = permission_; 
- } else { - result.permission_ = permissionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance()) return this; - if (other.hasPermission()) { - mergePermission(other.getPermission()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPermission()) { - - return false; - } - if (!getPermission().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); - if (hasPermission()) { - subBuilder.mergeFrom(getPermission()); - } - input.readMessage(subBuilder, extensionRegistry); - setPermission(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .UserPermission permission = 1; - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; - public boolean hasPermission() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() { - if (permissionBuilder_ == null) { - return permission_; - } else { - return permissionBuilder_.getMessage(); - } - } - public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - permission_ = value; - onChanged(); - } else { - permissionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setPermission( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { - if (permissionBuilder_ == null) { - permission_ = 
builderForValue.build(); - onChanged(); - } else { - permissionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) { - permission_ = - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(permission_).mergeFrom(value).buildPartial(); - } else { - permission_ = value; - } - onChanged(); - } else { - permissionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearPermission() { - if (permissionBuilder_ == null) { - permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance(); - onChanged(); - } else { - permissionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getPermissionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() { - if (permissionBuilder_ != null) { - return permissionBuilder_.getMessageOrBuilder(); - } else { - return permission_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> - getPermissionFieldBuilder() { - if (permissionBuilder_ == null) { - permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>( - permission_, - getParentForChildren(), - isClean()); - permission_ = null; - } - return permissionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RevokeRequest) - } - - static { - defaultInstance = new RevokeRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RevokeRequest) - } - - public interface RevokeResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class RevokeResponse extends - com.google.protobuf.GeneratedMessage - implements RevokeResponseOrBuilder { - // Use RevokeResponse.newBuilder() to construct. 
- private RevokeResponse(Builder builder) { - super(builder); - } - private RevokeResponse(boolean noInit) {} - - private static final RevokeResponse defaultInstance; - public static RevokeResponse getDefaultInstance() { - return defaultInstance; - } - - public RevokeResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return 
this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:RevokeResponse) - } - - static { - defaultInstance = new RevokeResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RevokeResponse) - } - - public interface UserPermissionsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes table = 1; - boolean hasTable(); - com.google.protobuf.ByteString getTable(); - } - public static final class UserPermissionsRequest extends - com.google.protobuf.GeneratedMessage - implements UserPermissionsRequestOrBuilder { - // Use UserPermissionsRequest.newBuilder() to 
construct. - private UserPermissionsRequest(Builder builder) { - super(builder); - } - private UserPermissionsRequest(boolean noInit) {} - - private static final UserPermissionsRequest defaultInstance; - public static UserPermissionsRequest getDefaultInstance() { - return defaultInstance; - } - - public UserPermissionsRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes table = 1; - public static final int TABLE_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString table_; - public boolean hasTable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTable() { - return table_; - } - - private void initFields() { - table_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTable()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, table_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, table_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) obj; - - boolean result = true; - result = result && (hasTable() == other.hasTable()); - if (hasTable()) { - result = result && getTable() - .equals(other.getTable()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTable()) { - hash = (37 * hash) + TABLE_FIELD_NUMBER; - hash = (53 * hash) + getTable().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( - 
com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - table_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.table_ = table_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest 
other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance()) return this; - if (other.hasTable()) { - setTable(other.getTable()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTable()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - table_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes table = 1; - private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTable() { - return table_; - } - public Builder setTable(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - table_ = value; - onChanged(); - return this; - } - public Builder clearTable() { - bitField0_ = (bitField0_ & ~0x00000001); - table_ = getDefaultInstance().getTable(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:UserPermissionsRequest) - } - - static { - defaultInstance = new UserPermissionsRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UserPermissionsRequest) - } - - public interface UserPermissionsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .UserPermission permission = 1; - java.util.List - getPermissionList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index); - int getPermissionCount(); - java.util.List - getPermissionOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder( - int index); - } - public static final class UserPermissionsResponse extends - com.google.protobuf.GeneratedMessage - implements UserPermissionsResponseOrBuilder { - // Use UserPermissionsResponse.newBuilder() to construct. 
- private UserPermissionsResponse(Builder builder) { - super(builder); - } - private UserPermissionsResponse(boolean noInit) {} - - private static final UserPermissionsResponse defaultInstance; - public static UserPermissionsResponse getDefaultInstance() { - return defaultInstance; - } - - public UserPermissionsResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable; - } - - // repeated .UserPermission permission = 1; - public static final int PERMISSION_FIELD_NUMBER = 1; - private java.util.List permission_; - public java.util.List getPermissionList() { - return permission_; - } - public java.util.List - getPermissionOrBuilderList() { - return permission_; - } - public int getPermissionCount() { - return permission_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) { - return permission_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder( - int index) { - return permission_.get(index); - } - - private void initFields() { - permission_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getPermissionCount(); i++) { - if (!getPermission(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < permission_.size(); i++) { - output.writeMessage(1, permission_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < permission_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, permission_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) obj; - - boolean result = true; - result = result && getPermissionList() - .equals(other.getPermissionList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - 
public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getPermissionCount() > 0) { - hash = (37 * hash) + PERMISSION_FIELD_NUMBER; - hash = (53 * hash) + getPermissionList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (permissionBuilder_ == null) { - permission_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - permissionBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse(this); - int from_bitField0_ = bitField0_; - if (permissionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - permission_ = 
java.util.Collections.unmodifiableList(permission_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.permission_ = permission_; - } else { - result.permission_ = permissionBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()) return this; - if (permissionBuilder_ == null) { - if (!other.permission_.isEmpty()) { - if (permission_.isEmpty()) { - permission_ = other.permission_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensurePermissionIsMutable(); - permission_.addAll(other.permission_); - } - onChanged(); - } - } else { - if (!other.permission_.isEmpty()) { - if (permissionBuilder_.isEmpty()) { - permissionBuilder_.dispose(); - permissionBuilder_ = null; - permission_ = other.permission_; - bitField0_ = (bitField0_ & ~0x00000001); - permissionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getPermissionFieldBuilder() : null; - } else { - permissionBuilder_.addAllMessages(other.permission_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getPermissionCount(); i++) { - if (!getPermission(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermission(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .UserPermission permission = 1; - private java.util.List permission_ = - java.util.Collections.emptyList(); - private void ensurePermissionIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - permission_ = new java.util.ArrayList(permission_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_; - - public java.util.List getPermissionList() { - if (permissionBuilder_ == null) { - 
return java.util.Collections.unmodifiableList(permission_); - } else { - return permissionBuilder_.getMessageList(); - } - } - public int getPermissionCount() { - if (permissionBuilder_ == null) { - return permission_.size(); - } else { - return permissionBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) { - if (permissionBuilder_ == null) { - return permission_.get(index); - } else { - return permissionBuilder_.getMessage(index); - } - } - public Builder setPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionIsMutable(); - permission_.set(index, value); - onChanged(); - } else { - permissionBuilder_.setMessage(index, value); - } - return this; - } - public Builder setPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.set(index, builderForValue.build()); - onChanged(); - } else { - permissionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionIsMutable(); - permission_.add(value); - onChanged(); - } else { - permissionBuilder_.addMessage(value); - } - return this; - } - public Builder addPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionIsMutable(); - permission_.add(index, value); - onChanged(); - } else { - permissionBuilder_.addMessage(index, value); - } - return this; - } - public Builder addPermission( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.add(builderForValue.build()); - onChanged(); - } else { - permissionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.add(index, builderForValue.build()); - onChanged(); - } else { - permissionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllPermission( - java.lang.Iterable values) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - super.addAll(values, permission_); - onChanged(); - } else { - permissionBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearPermission() { - if (permissionBuilder_ == null) { - permission_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - permissionBuilder_.clear(); - } - return this; - } - public Builder removePermission(int index) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.remove(index); - onChanged(); - } else { - permissionBuilder_.remove(index); - } - return this; - } - public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder( - int index) { - return getPermissionFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder( - int index) { - if (permissionBuilder_ == null) { - return permission_.get(index); } else { - return permissionBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getPermissionOrBuilderList() { - if (permissionBuilder_ != null) { - return permissionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(permission_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder() { - return getPermissionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder( - int index) { - return getPermissionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()); - } - public java.util.List - getPermissionBuilderList() { - return getPermissionFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> - getPermissionFieldBuilder() { - if (permissionBuilder_ == null) { - permissionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>( - permission_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - permission_ = null; - } - return permissionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:UserPermissionsResponse) - } - - static { - defaultInstance = new UserPermissionsResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UserPermissionsResponse) - } - - public interface CheckPermissionsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Permission permission = 1; - java.util.List - getPermissionList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index); - int getPermissionCount(); - java.util.List - getPermissionOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( - int index); - } - public static final class CheckPermissionsRequest extends - com.google.protobuf.GeneratedMessage - implements CheckPermissionsRequestOrBuilder { - // Use CheckPermissionsRequest.newBuilder() to construct. 
- private CheckPermissionsRequest(Builder builder) { - super(builder); - } - private CheckPermissionsRequest(boolean noInit) {} - - private static final CheckPermissionsRequest defaultInstance; - public static CheckPermissionsRequest getDefaultInstance() { - return defaultInstance; - } - - public CheckPermissionsRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable; - } - - // repeated .Permission permission = 1; - public static final int PERMISSION_FIELD_NUMBER = 1; - private java.util.List permission_; - public java.util.List getPermissionList() { - return permission_; - } - public java.util.List - getPermissionOrBuilderList() { - return permission_; - } - public int getPermissionCount() { - return permission_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) { - return permission_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( - int index) { - return permission_.get(index); - } - - private void initFields() { - permission_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < permission_.size(); i++) { - output.writeMessage(1, permission_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < permission_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, permission_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) obj; - - boolean result = true; - result = result && getPermissionList() - .equals(other.getPermissionList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getPermissionCount() > 0) { - hash = (37 * hash) + 
PERMISSION_FIELD_NUMBER; - hash = (53 * hash) + getPermissionList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public 
Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPermissionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (permissionBuilder_ == null) { - permission_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - permissionBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest(this); - int from_bitField0_ = bitField0_; - if (permissionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - permission_ = java.util.Collections.unmodifiableList(permission_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.permission_ = permission_; - } else { - result.permission_ 
= permissionBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance()) return this; - if (permissionBuilder_ == null) { - if (!other.permission_.isEmpty()) { - if (permission_.isEmpty()) { - permission_ = other.permission_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensurePermissionIsMutable(); - permission_.addAll(other.permission_); - } - onChanged(); - } - } else { - if (!other.permission_.isEmpty()) { - if (permissionBuilder_.isEmpty()) { - permissionBuilder_.dispose(); - permissionBuilder_ = null; - permission_ = other.permission_; - bitField0_ = (bitField0_ & ~0x00000001); - permissionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getPermissionFieldBuilder() : null; - } else { - permissionBuilder_.addAllMessages(other.permission_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addPermission(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Permission permission = 1; - private java.util.List permission_ = - java.util.Collections.emptyList(); - private void ensurePermissionIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - permission_ = new java.util.ArrayList(permission_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_; - - public java.util.List getPermissionList() { - if (permissionBuilder_ == null) { - return java.util.Collections.unmodifiableList(permission_); - } else { - return permissionBuilder_.getMessageList(); - } - } - public int getPermissionCount() { - if (permissionBuilder_ == null) { - return permission_.size(); - } else { - return permissionBuilder_.getCount(); - } - } - public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) { - if (permissionBuilder_ == null) { - return permission_.get(index); - } else { - return permissionBuilder_.getMessage(index); - } - } - public Builder setPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionIsMutable(); - permission_.set(index, value); - onChanged(); - } else { - permissionBuilder_.setMessage(index, value); - } - return this; - } - public Builder setPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.set(index, builderForValue.build()); - onChanged(); - } else { - permissionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionIsMutable(); - permission_.add(value); - onChanged(); - } else { - permissionBuilder_.addMessage(value); - } - return this; - } - public Builder addPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) { - if (permissionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePermissionIsMutable(); - permission_.add(index, value); - onChanged(); - } else { - permissionBuilder_.addMessage(index, value); - } - return this; - } - public Builder addPermission( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.add(builderForValue.build()); - onChanged(); - } else { - permissionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addPermission( - int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.add(index, builderForValue.build()); - onChanged(); - } else { - permissionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllPermission( - java.lang.Iterable values) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - super.addAll(values, permission_); - onChanged(); - } else { - permissionBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearPermission() { - if (permissionBuilder_ == null) { - permission_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - permissionBuilder_.clear(); - } - return this; - } - public Builder removePermission(int index) { - if (permissionBuilder_ == null) { - ensurePermissionIsMutable(); - permission_.remove(index); - onChanged(); - } else { - permissionBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder( - int index) { - return getPermissionFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder( - int index) { - if 
(permissionBuilder_ == null) { - return permission_.get(index); } else { - return permissionBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getPermissionOrBuilderList() { - if (permissionBuilder_ != null) { - return permissionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(permission_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder() { - return getPermissionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder( - int index) { - return getPermissionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()); - } - public java.util.List - getPermissionBuilderList() { - return getPermissionFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> - getPermissionFieldBuilder() { - if (permissionBuilder_ == null) { - permissionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>( - permission_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - permission_ = null; - } - return permissionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:CheckPermissionsRequest) - } - - static { - defaultInstance = new CheckPermissionsRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CheckPermissionsRequest) - } - - public interface CheckPermissionsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class CheckPermissionsResponse extends - com.google.protobuf.GeneratedMessage - implements CheckPermissionsResponseOrBuilder { - // Use CheckPermissionsResponse.newBuilder() to construct. 
- private CheckPermissionsResponse(Builder builder) { - super(builder); - } - private CheckPermissionsResponse(boolean noInit) {} - - private static final CheckPermissionsResponse defaultInstance; - public static CheckPermissionsResponse getDefaultInstance() { - return defaultInstance; - } - - public CheckPermissionsResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - 
maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:CheckPermissionsResponse) - } - - static { - defaultInstance = new CheckPermissionsResponse(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:CheckPermissionsResponse) - } - - public static abstract class AccessControlService - implements com.google.protobuf.Service { - protected AccessControlService() {} - - public interface Interface { - public abstract void grant( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void revoke( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getUserPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void checkPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new AccessControlService() { - @java.lang.Override - public void grant( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, - com.google.protobuf.RpcCallback done) { - impl.grant(controller, request, done); - } - - @java.lang.Override - public void revoke( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, - com.google.protobuf.RpcCallback done) { - impl.revoke(controller, request, done); - } - - @java.lang.Override - public void getUserPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done) { - impl.getUserPermissions(controller, request, done); - } - - @java.lang.Override - public void checkPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, - com.google.protobuf.RpcCallback done) { - impl.checkPermissions(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.grant(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)request); - case 1: - return impl.revoke(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request); - case 2: - return impl.getUserPermissions(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request); - case 3: - return impl.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void grant( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void revoke( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getUserPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void checkPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message 
request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.grant(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.revoke(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void grant( - 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance())); - } - - public void revoke( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance())); - } - - public void getUserPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance())); - } - - public void checkPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Permission_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Permission_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UserPermission_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserPermission_fieldAccessorTable; - private 
static com.google.protobuf.Descriptors.Descriptor - internal_static_UserTablePermissions_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserTablePermissions_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UserTablePermissions_UserPermissions_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GrantRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GrantRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GrantResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GrantResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RevokeRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RevokeRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RevokeResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RevokeResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UserPermissionsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserPermissionsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UserPermissionsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserPermissionsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CheckPermissionsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CheckPermissionsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CheckPermissionsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CheckPermissionsResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\023AccessControl.proto\"\242\001\n\nPermission\022\"\n\006" + - "action\030\001 \003(\0162\022.Permission.Action\022\r\n\005tabl" + - "e\030\002 \001(\014\022\016\n\006family\030\003 \001(\014\022\021\n\tqualifier\030\004 \001" + - "(\014\">\n\006Action\022\010\n\004READ\020\000\022\t\n\005WRITE\020\001\022\010\n\004EXE" + - "C\020\002\022\n\n\006CREATE\020\003\022\t\n\005ADMIN\020\004\"?\n\016UserPermis" + - "sion\022\014\n\004user\030\001 \002(\014\022\037\n\npermission\030\002 \002(\0132\013" + - ".Permission\"\225\001\n\024UserTablePermissions\022:\n\013" + - "permissions\030\001 \003(\0132%.UserTablePermissions" + - ".UserPermissions\032A\n\017UserPermissions\022\014\n\004u" + - "ser\030\001 \002(\014\022 \n\013permissions\030\002 \003(\0132\013.Permiss", - "ion\"3\n\014GrantRequest\022#\n\npermission\030\001 \002(\0132" + - 
"\017.UserPermission\"\017\n\rGrantResponse\"4\n\rRev" + - "okeRequest\022#\n\npermission\030\001 \002(\0132\017.UserPer" + - "mission\"\020\n\016RevokeResponse\"\'\n\026UserPermiss" + - "ionsRequest\022\r\n\005table\030\001 \002(\014\">\n\027UserPermis" + - "sionsResponse\022#\n\npermission\030\001 \003(\0132\017.User" + - "Permission\":\n\027CheckPermissionsRequest\022\037\n" + - "\npermission\030\001 \003(\0132\013.Permission\"\032\n\030CheckP" + - "ermissionsResponse2\373\001\n\024AccessControlServ" + - "ice\022&\n\005grant\022\r.GrantRequest\032\016.GrantRespo", - "nse\022)\n\006revoke\022\016.RevokeRequest\032\017.RevokeRe" + - "sponse\022G\n\022getUserPermissions\022\027.UserPermi" + - "ssionsRequest\032\030.UserPermissionsResponse\022" + - "G\n\020checkPermissions\022\030.CheckPermissionsRe" + - "quest\032\031.CheckPermissionsResponseBI\n*org." + - "apache.hadoop.hbase.protobuf.generatedB\023" + - "AccessControlProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_Permission_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_Permission_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Permission_descriptor, - new java.lang.String[] { "Action", "Table", "Family", "Qualifier", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder.class); - internal_static_UserPermission_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_UserPermission_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserPermission_descriptor, - new java.lang.String[] { "User", "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder.class); - internal_static_UserTablePermissions_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_UserTablePermissions_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserTablePermissions_descriptor, - new java.lang.String[] { "Permissions", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.Builder.class); - internal_static_UserTablePermissions_UserPermissions_descriptor = - internal_static_UserTablePermissions_descriptor.getNestedTypes().get(0); - internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserTablePermissions_UserPermissions_descriptor, - new java.lang.String[] { "User", "Permissions", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder.class); - internal_static_GrantRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_GrantRequest_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GrantRequest_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.Builder.class); - internal_static_GrantResponse_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_GrantResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GrantResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.Builder.class); - internal_static_RevokeRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_RevokeRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RevokeRequest_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.Builder.class); - internal_static_RevokeResponse_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_RevokeResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RevokeResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.Builder.class); - internal_static_UserPermissionsRequest_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_UserPermissionsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserPermissionsRequest_descriptor, - new java.lang.String[] { "Table", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class); - internal_static_UserPermissionsResponse_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_UserPermissionsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserPermissionsResponse_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class); - internal_static_CheckPermissionsRequest_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_CheckPermissionsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CheckPermissionsRequest_descriptor, - new java.lang.String[] { "Permission", }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.Builder.class); - internal_static_CheckPermissionsResponse_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_CheckPermissionsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_CheckPermissionsResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java deleted file mode 100644 index 73434c1..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java +++ /dev/null @@ -1,16766 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: Admin.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class AdminProtos { - private AdminProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface GetRegionInfoRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional bool compactionState = 2; - boolean hasCompactionState(); - boolean getCompactionState(); - } - public static final class GetRegionInfoRequest extends - com.google.protobuf.GeneratedMessage - implements GetRegionInfoRequestOrBuilder { - // Use GetRegionInfoRequest.newBuilder() to construct. 
- private GetRegionInfoRequest(Builder builder) { - super(builder); - } - private GetRegionInfoRequest(boolean noInit) {} - - private static final GetRegionInfoRequest defaultInstance; - public static GetRegionInfoRequest getDefaultInstance() { - return defaultInstance; - } - - public GetRegionInfoRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional bool compactionState = 2; - public static final int COMPACTIONSTATE_FIELD_NUMBER = 2; - private boolean compactionState_; - public boolean hasCompactionState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getCompactionState() { - return compactionState_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - compactionState_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, compactionState_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, compactionState_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) { - return 
super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasCompactionState() == other.hasCompactionState()); - if (hasCompactionState()) { - result = result && (getCompactionState() - == other.getCompactionState()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasCompactionState()) { - hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCompactionState()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - compactionState_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.compactionState_ = compactionState_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasCompactionState()) { - setCompactionState(other.getCompactionState()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - compactionState_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional bool compactionState = 2; - private boolean compactionState_ ; - public boolean hasCompactionState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getCompactionState() { - return compactionState_; - } - public Builder setCompactionState(boolean value) { - bitField0_ |= 0x00000002; - compactionState_ = value; - onChanged(); - return this; - } - public Builder clearCompactionState() { - bitField0_ = (bitField0_ 
& ~0x00000002); - compactionState_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) - } - - static { - defaultInstance = new GetRegionInfoRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) - } - - public interface GetRegionInfoResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionInfo regionInfo = 1; - boolean hasRegionInfo(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - - // optional .GetRegionInfoResponse.CompactionState compactionState = 2; - boolean hasCompactionState(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState(); - } - public static final class GetRegionInfoResponse extends - com.google.protobuf.GeneratedMessage - implements GetRegionInfoResponseOrBuilder { - // Use GetRegionInfoResponse.newBuilder() to construct. - private GetRegionInfoResponse(Builder builder) { - super(builder); - } - private GetRegionInfoResponse(boolean noInit) {} - - private static final GetRegionInfoResponse defaultInstance; - public static GetRegionInfoResponse getDefaultInstance() { - return defaultInstance; - } - - public GetRegionInfoResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; - } - - public enum CompactionState - implements com.google.protobuf.ProtocolMessageEnum { - NONE(0, 0), - MINOR(1, 1), - MAJOR(2, 2), - MAJOR_AND_MINOR(3, 3), - ; - - public static final int NONE_VALUE = 0; - public static final int MINOR_VALUE = 1; - public static final int MAJOR_VALUE = 2; - public static final int MAJOR_AND_MINOR_VALUE = 3; - - - public final int getNumber() { return value; } - - public static CompactionState valueOf(int value) { - switch (value) { - case 0: return NONE; - case 1: return MINOR; - case 2: return MAJOR; - case 3: return MAJOR_AND_MINOR; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public CompactionState findValueByNumber(int number) { - return CompactionState.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0); - } - - private static final CompactionState[] VALUES = { - NONE, MINOR, MAJOR, MAJOR_AND_MINOR, - }; - - public static CompactionState valueOf( - 
com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private CompactionState(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:GetRegionInfoResponse.CompactionState) - } - - private int bitField0_; - // required .RegionInfo regionInfo = 1; - public static final int REGIONINFO_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; - public boolean hasRegionInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; - } - - // optional .GetRegionInfoResponse.CompactionState compactionState = 2; - public static final int COMPACTIONSTATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_; - public boolean hasCompactionState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { - return compactionState_; - } - - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionInfo()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegionInfo().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, compactionState_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, compactionState_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj; - - boolean result = true; - result = result && (hasRegionInfo() == other.hasRegionInfo()); - if (hasRegionInfo()) { - result = result && getRegionInfo() - .equals(other.getRegionInfo()); - } - result = result && (hasCompactionState() == other.hasCompactionState()); - if (hasCompactionState()) { - result = result && - (getCompactionState() == other.getCompactionState()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionInfo()) { - hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; - hash = (53 * hash) + getRegionInfo().hashCode(); - } - if (hasCompactionState()) { - hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompactionState()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, 
extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } else { - regionInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionInfoBuilder_ == null) { - result.regionInfo_ = regionInfo_; - } else { - result.regionInfo_ = regionInfoBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.compactionState_ = compactionState_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; - if (other.hasRegionInfo()) { - mergeRegionInfo(other.getRegionInfo()); - } - if (other.hasCompactionState()) { - setCompactionState(other.getCompactionState()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionInfo()) { - - return false; - } - if (!getRegionInfo().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegionInfo()) { - subBuilder.mergeFrom(getRegionInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionInfo(subBuilder.buildPartial()); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue); - if (value == null) { - 
unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - compactionState_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required .RegionInfo regionInfo = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; - public boolean hasRegionInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - if (regionInfoBuilder_ == null) { - return regionInfo_; - } else { - return regionInfoBuilder_.getMessage(); - } - } - public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - regionInfo_ = value; - onChanged(); - } else { - regionInfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegionInfo( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - regionInfo_ = builderForValue.build(); - onChanged(); - } else { - regionInfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { - regionInfo_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); - } else { - regionInfo_ = value; - } - onChanged(); - } else { - regionInfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegionInfo() { - if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - onChanged(); - } else { - regionInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionInfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - if (regionInfoBuilder_ != null) { - return regionInfoBuilder_.getMessageOrBuilder(); - } else { - return regionInfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> - getRegionInfoFieldBuilder() { - if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, - getParentForChildren(), - isClean()); - regionInfo_ = null; - } - return regionInfoBuilder_; - } - - // optional .GetRegionInfoResponse.CompactionState compactionState = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; - public boolean hasCompactionState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { - return compactionState_; - } - public Builder setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - compactionState_ = value; - onChanged(); - return this; - } - public Builder clearCompactionState() { - bitField0_ = (bitField0_ & ~0x00000002); - compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) - } - - static { - defaultInstance = new GetRegionInfoResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) - } - - public interface GetStoreFileRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated bytes family = 2; - java.util.List getFamilyList(); - int getFamilyCount(); - com.google.protobuf.ByteString getFamily(int index); - } - public static final class GetStoreFileRequest extends - com.google.protobuf.GeneratedMessage - implements GetStoreFileRequestOrBuilder { - // Use GetStoreFileRequest.newBuilder() to construct. 
- private GetStoreFileRequest(Builder builder) { - super(builder); - } - private GetStoreFileRequest(boolean noInit) {} - - private static final GetStoreFileRequest defaultInstance; - public static GetStoreFileRequest getDefaultInstance() { - return defaultInstance; - } - - public GetStoreFileRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated bytes family = 2; - public static final int FAMILY_FIELD_NUMBER = 2; - private java.util.List family_; - public java.util.List - getFamilyList() { - return family_; - } - public int getFamilyCount() { - return family_.size(); - } - public com.google.protobuf.ByteString getFamily(int index) { - return family_.get(index); - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - family_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < family_.size(); i++) { - output.writeBytes(2, family_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - { - int dataSize = 0; - for (int i = 0; i < family_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(family_.get(i)); - } - size += dataSize; - size += 1 * getFamilyList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - 
} - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getFamilyList() - .equals(other.getFamilyList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getFamilyCount() > 0) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamilyList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - family_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - family_ = java.util.Collections.unmodifiableList(family_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.family_ = family_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (!other.family_.isEmpty()) { - if (family_.isEmpty()) { - family_ = other.family_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureFamilyIsMutable(); - family_.addAll(other.family_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - ensureFamilyIsMutable(); - family_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< 
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // repeated bytes family = 2; - private java.util.List family_ = java.util.Collections.emptyList();; - private void ensureFamilyIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - family_ = new java.util.ArrayList(family_); - bitField0_ |= 0x00000002; - } - } - public java.util.List 
- getFamilyList() { - return java.util.Collections.unmodifiableList(family_); - } - public int getFamilyCount() { - return family_.size(); - } - public com.google.protobuf.ByteString getFamily(int index) { - return family_.get(index); - } - public Builder setFamily( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyIsMutable(); - family_.set(index, value); - onChanged(); - return this; - } - public Builder addFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyIsMutable(); - family_.add(value); - onChanged(); - return this; - } - public Builder addAllFamily( - java.lang.Iterable values) { - ensureFamilyIsMutable(); - super.addAll(values, family_); - onChanged(); - return this; - } - public Builder clearFamily() { - family_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetStoreFileRequest) - } - - static { - defaultInstance = new GetStoreFileRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetStoreFileRequest) - } - - public interface GetStoreFileResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated string storeFile = 1; - java.util.List getStoreFileList(); - int getStoreFileCount(); - String getStoreFile(int index); - } - public static final class GetStoreFileResponse extends - com.google.protobuf.GeneratedMessage - implements GetStoreFileResponseOrBuilder { - // Use GetStoreFileResponse.newBuilder() to construct. - private GetStoreFileResponse(Builder builder) { - super(builder); - } - private GetStoreFileResponse(boolean noInit) {} - - private static final GetStoreFileResponse defaultInstance; - public static GetStoreFileResponse getDefaultInstance() { - return defaultInstance; - } - - public GetStoreFileResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable; - } - - // repeated string storeFile = 1; - public static final int STOREFILE_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList storeFile_; - public java.util.List - getStoreFileList() { - return storeFile_; - } - public int getStoreFileCount() { - return storeFile_.size(); - } - public String getStoreFile(int index) { - return storeFile_.get(index); - } - - private void initFields() { - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < storeFile_.size(); i++) { - output.writeBytes(1, storeFile_.getByteString(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = 
memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < storeFile_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(storeFile_.getByteString(i)); - } - size += dataSize; - size += 1 * getStoreFileList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) obj; - - boolean result = true; - result = result && getStoreFileList() - .equals(other.getStoreFileList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getStoreFileCount() > 0) { - hash = (37 * hash) + STOREFILE_FIELD_NUMBER; - hash = (53 * hash) + getStoreFileList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - 
return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( - storeFile_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.storeFile_ = storeFile_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()) return this; - if (!other.storeFile_.isEmpty()) { - if (storeFile_.isEmpty()) { - storeFile_ = other.storeFile_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureStoreFileIsMutable(); - storeFile_.addAll(other.storeFile_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureStoreFileIsMutable(); - storeFile_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated string storeFile = 1; - private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureStoreFileIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getStoreFileList() { - return java.util.Collections.unmodifiableList(storeFile_); - } - public int getStoreFileCount() { - return storeFile_.size(); - } - public String getStoreFile(int index) { - return storeFile_.get(index); - } - public Builder setStoreFile( - int index, String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.set(index, 
value); - onChanged(); - return this; - } - public Builder addStoreFile(String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.add(value); - onChanged(); - return this; - } - public Builder addAllStoreFile( - java.lang.Iterable values) { - ensureStoreFileIsMutable(); - super.addAll(values, storeFile_); - onChanged(); - return this; - } - public Builder clearStoreFile() { - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - void addStoreFile(com.google.protobuf.ByteString value) { - ensureStoreFileIsMutable(); - storeFile_.add(value); - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:GetStoreFileResponse) - } - - static { - defaultInstance = new GetStoreFileResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetStoreFileResponse) - } - - public interface GetOnlineRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class GetOnlineRegionRequest extends - com.google.protobuf.GeneratedMessage - implements GetOnlineRegionRequestOrBuilder { - // Use GetOnlineRegionRequest.newBuilder() to construct. - private GetOnlineRegionRequest(Builder builder) { - super(builder); - } - private GetOnlineRegionRequest(boolean noInit) {} - - private static final GetOnlineRegionRequest defaultInstance; - public static GetOnlineRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public GetOnlineRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override 
- public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected 
Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean 
isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest) - } - - static { - defaultInstance = new GetOnlineRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest) - } - - public interface GetOnlineRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .RegionInfo regionInfo = 1; - java.util.List - getRegionInfoList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index); - int getRegionInfoCount(); - java.util.List - getRegionInfoOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( - int index); - } - public static final class GetOnlineRegionResponse extends - com.google.protobuf.GeneratedMessage - implements GetOnlineRegionResponseOrBuilder { - // Use GetOnlineRegionResponse.newBuilder() to construct. - private GetOnlineRegionResponse(Builder builder) { - super(builder); - } - private GetOnlineRegionResponse(boolean noInit) {} - - private static final GetOnlineRegionResponse defaultInstance; - public static GetOnlineRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public GetOnlineRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; - } - - // repeated .RegionInfo regionInfo = 1; - public static final int REGIONINFO_FIELD_NUMBER = 1; - private java.util.List regionInfo_; - public java.util.List getRegionInfoList() { - return regionInfo_; - } - public java.util.List - getRegionInfoOrBuilderList() { - return regionInfo_; - } - public int getRegionInfoCount() { - return regionInfo_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { - return regionInfo_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( - int index) { - return regionInfo_.get(index); - } - - private void initFields() { - regionInfo_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getRegionInfoCount(); i++) { - if (!getRegionInfo(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - 
memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < regionInfo_.size(); i++) { - output.writeMessage(1, regionInfo_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < regionInfo_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj; - - boolean result = true; - result = result && getRegionInfoList() - .equals(other.getRegionInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRegionInfoCount() > 0) { - hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; - hash = (53 * hash) + getRegionInfoList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionInfoBuilder_ == null) { - regionInfo_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - regionInfoBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDescriptor(); - } - - 
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this); - int from_bitField0_ = bitField0_; - if (regionInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.regionInfo_ = regionInfo_; - } else { - result.regionInfo_ = regionInfoBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; - if (regionInfoBuilder_ == null) { - if (!other.regionInfo_.isEmpty()) { - if (regionInfo_.isEmpty()) { - regionInfo_ = other.regionInfo_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureRegionInfoIsMutable(); - regionInfo_.addAll(other.regionInfo_); - } - onChanged(); - } - } else { - if (!other.regionInfo_.isEmpty()) { - if (regionInfoBuilder_.isEmpty()) { - regionInfoBuilder_.dispose(); - regionInfoBuilder_ = null; - regionInfo_ = other.regionInfo_; - bitField0_ = (bitField0_ & ~0x00000001); - regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getRegionInfoFieldBuilder() : null; - } else { - regionInfoBuilder_.addAllMessages(other.regionInfo_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getRegionInfoCount(); i++) { - if (!getRegionInfo(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRegionInfo(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .RegionInfo regionInfo = 1; - private java.util.List regionInfo_ = - java.util.Collections.emptyList(); - private void ensureRegionInfoIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - regionInfo_ = new java.util.ArrayList(regionInfo_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; - - public java.util.List getRegionInfoList() { - if (regionInfoBuilder_ == null) { - return java.util.Collections.unmodifiableList(regionInfo_); - } else { - return regionInfoBuilder_.getMessageList(); - } - } - public int getRegionInfoCount() { - if (regionInfoBuilder_ == null) { - return regionInfo_.size(); - } else { - return regionInfoBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { - if (regionInfoBuilder_ == null) { - return regionInfo_.get(index); - } else { - return regionInfoBuilder_.getMessage(index); - } - } - public Builder setRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionInfoIsMutable(); - regionInfo_.set(index, value); - onChanged(); - } else { - regionInfoBuilder_.setMessage(index, value); - } - return this; - } - public Builder setRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.set(index, builderForValue.build()); - onChanged(); - } else { - regionInfoBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionInfoIsMutable(); - 
regionInfo_.add(value); - onChanged(); - } else { - regionInfoBuilder_.addMessage(value); - } - return this; - } - public Builder addRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionInfoIsMutable(); - regionInfo_.add(index, value); - onChanged(); - } else { - regionInfoBuilder_.addMessage(index, value); - } - return this; - } - public Builder addRegionInfo( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.add(builderForValue.build()); - onChanged(); - } else { - regionInfoBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.add(index, builderForValue.build()); - onChanged(); - } else { - regionInfoBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllRegionInfo( - java.lang.Iterable values) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); - onChanged(); - } else { - regionInfoBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearRegionInfo() { - if (regionInfoBuilder_ == null) { - regionInfo_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - regionInfoBuilder_.clear(); - } - return this; - } - public Builder removeRegionInfo(int index) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.remove(index); - onChanged(); - } else { - regionInfoBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( - int index) { - return getRegionInfoFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( - int index) { - if (regionInfoBuilder_ == null) { - return regionInfo_.get(index); } else { - return regionInfoBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getRegionInfoOrBuilderList() { - if (regionInfoBuilder_ != null) { - return regionInfoBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(regionInfo_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { - return getRegionInfoFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( - int index) { - return getRegionInfoFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); - } - public java.util.List - getRegionInfoBuilderList() { - return getRegionInfoFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> - 
getRegionInfoFieldBuilder() { - if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - regionInfo_ = null; - } - return regionInfoBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse) - } - - static { - defaultInstance = new GetOnlineRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse) - } - - public interface OpenRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; - java.util.List - getOpenInfoList(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index); - int getOpenInfoCount(); - java.util.List - getOpenInfoOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( - int index); - } - public static final class OpenRegionRequest extends - com.google.protobuf.GeneratedMessage - implements OpenRegionRequestOrBuilder { - // Use OpenRegionRequest.newBuilder() to construct. - private OpenRegionRequest(Builder builder) { - super(builder); - } - private OpenRegionRequest(boolean noInit) {} - - private static final OpenRegionRequest defaultInstance; - public static OpenRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public OpenRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; - } - - public interface RegionOpenInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionInfo region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder(); - - // optional uint32 versionOfOfflineNode = 2; - boolean hasVersionOfOfflineNode(); - int getVersionOfOfflineNode(); - } - public static final class RegionOpenInfo extends - com.google.protobuf.GeneratedMessage - implements RegionOpenInfoOrBuilder { - // Use RegionOpenInfo.newBuilder() to construct. 
- private RegionOpenInfo(Builder builder) { - super(builder); - } - private RegionOpenInfo(boolean noInit) {} - - private static final RegionOpenInfo defaultInstance; - public static RegionOpenInfo getDefaultInstance() { - return defaultInstance; - } - - public RegionOpenInfo getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionInfo region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional uint32 versionOfOfflineNode = 2; - public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2; - private int versionOfOfflineNode_; - public boolean hasVersionOfOfflineNode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getVersionOfOfflineNode() { - return versionOfOfflineNode_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - versionOfOfflineNode_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, versionOfOfflineNode_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, versionOfOfflineNode_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)) { - 
return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode()); - if (hasVersionOfOfflineNode()) { - result = result && (getVersionOfOfflineNode() - == other.getVersionOfOfflineNode()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasVersionOfOfflineNode()) { - hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER; - hash = (53 * hash) + getVersionOfOfflineNode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - versionOfOfflineNode_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); - if 
(!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.versionOfOfflineNode_ = versionOfOfflineNode_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasVersionOfOfflineNode()) { - setVersionOfOfflineNode(other.getVersionOfOfflineNode()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionOfOfflineNode_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionInfo region = 1; - private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional uint32 versionOfOfflineNode = 2; - private int versionOfOfflineNode_ ; - public boolean hasVersionOfOfflineNode() { - return ((bitField0_ & 0x00000002) == 
0x00000002); - } - public int getVersionOfOfflineNode() { - return versionOfOfflineNode_; - } - public Builder setVersionOfOfflineNode(int value) { - bitField0_ |= 0x00000002; - versionOfOfflineNode_ = value; - onChanged(); - return this; - } - public Builder clearVersionOfOfflineNode() { - bitField0_ = (bitField0_ & ~0x00000002); - versionOfOfflineNode_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:OpenRegionRequest.RegionOpenInfo) - } - - static { - defaultInstance = new RegionOpenInfo(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OpenRegionRequest.RegionOpenInfo) - } - - // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; - public static final int OPENINFO_FIELD_NUMBER = 1; - private java.util.List openInfo_; - public java.util.List getOpenInfoList() { - return openInfo_; - } - public java.util.List - getOpenInfoOrBuilderList() { - return openInfo_; - } - public int getOpenInfoCount() { - return openInfo_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { - return openInfo_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( - int index) { - return openInfo_.get(index); - } - - private void initFields() { - openInfo_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getOpenInfoCount(); i++) { - if (!getOpenInfo(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < openInfo_.size(); i++) { - output.writeMessage(1, openInfo_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < openInfo_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, openInfo_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj; - - boolean result = true; - result = result && getOpenInfoList() - .equals(other.getOpenInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getOpenInfoCount() > 0) { - hash = (37 * hash) + OPENINFO_FIELD_NUMBER; - hash = (53 * hash) + getOpenInfoList().hashCode(); - } 
- hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - 
public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getOpenInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (openInfoBuilder_ == null) { - openInfo_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - openInfoBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this); - int from_bitField0_ = bitField0_; - if (openInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - openInfo_ = java.util.Collections.unmodifiableList(openInfo_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.openInfo_ = openInfo_; - } else { - result.openInfo_ = openInfoBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this; - if (openInfoBuilder_ == null) { - if (!other.openInfo_.isEmpty()) { - if (openInfo_.isEmpty()) { - openInfo_ = other.openInfo_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureOpenInfoIsMutable(); - openInfo_.addAll(other.openInfo_); - } - onChanged(); - } - } else { - if (!other.openInfo_.isEmpty()) { - if (openInfoBuilder_.isEmpty()) { - openInfoBuilder_.dispose(); - openInfoBuilder_ = null; - openInfo_ = other.openInfo_; - bitField0_ = (bitField0_ & ~0x00000001); - openInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getOpenInfoFieldBuilder() : null; - } else { - openInfoBuilder_.addAllMessages(other.openInfo_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getOpenInfoCount(); i++) { - if (!getOpenInfo(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addOpenInfo(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1; - private java.util.List openInfo_ = - java.util.Collections.emptyList(); - private void ensureOpenInfoIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - openInfo_ = new java.util.ArrayList(openInfo_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_; - - public java.util.List getOpenInfoList() { - if (openInfoBuilder_ == null) { - return java.util.Collections.unmodifiableList(openInfo_); - } else { - return openInfoBuilder_.getMessageList(); - } - } - public int getOpenInfoCount() { - if (openInfoBuilder_ == null) { - return openInfo_.size(); - } else { - return openInfoBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { - if (openInfoBuilder_ == null) { - return openInfo_.get(index); - } else { - return openInfoBuilder_.getMessage(index); - } - } - public Builder setOpenInfo( - int index, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { - if (openInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpenInfoIsMutable(); - openInfo_.set(index, value); - onChanged(); - } else { - openInfoBuilder_.setMessage(index, value); - } - return this; - } - public Builder setOpenInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { - if (openInfoBuilder_ == null) { - ensureOpenInfoIsMutable(); - openInfo_.set(index, builderForValue.build()); - onChanged(); - } else { - openInfoBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { - if (openInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpenInfoIsMutable(); - openInfo_.add(value); - onChanged(); - } else { - openInfoBuilder_.addMessage(value); - } - return this; - } - public Builder addOpenInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { - if (openInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpenInfoIsMutable(); - openInfo_.add(index, value); - onChanged(); - } else { - openInfoBuilder_.addMessage(index, value); - } - return this; - } - public Builder addOpenInfo( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { - if (openInfoBuilder_ == null) { - ensureOpenInfoIsMutable(); - openInfo_.add(builderForValue.build()); - onChanged(); - } else { - openInfoBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addOpenInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { - if (openInfoBuilder_ == null) { - ensureOpenInfoIsMutable(); - openInfo_.add(index, builderForValue.build()); - onChanged(); - } else { - openInfoBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllOpenInfo( - java.lang.Iterable values) { - if (openInfoBuilder_ == null) { - ensureOpenInfoIsMutable(); - super.addAll(values, openInfo_); - onChanged(); - } else { - openInfoBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearOpenInfo() { - if (openInfoBuilder_ == null) { - openInfo_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - openInfoBuilder_.clear(); - } - return this; - } - public Builder removeOpenInfo(int index) { - if (openInfoBuilder_ == null) { - ensureOpenInfoIsMutable(); - openInfo_.remove(index); - onChanged(); - } else { - openInfoBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder getOpenInfoBuilder( - int index) { - return getOpenInfoFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( - int index) { - if (openInfoBuilder_ == null) { - return openInfo_.get(index); } else { - return openInfoBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getOpenInfoOrBuilderList() { - if (openInfoBuilder_ != null) { - return openInfoBuilder_.getMessageOrBuilderList(); - 
} else { - return java.util.Collections.unmodifiableList(openInfo_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder() { - return getOpenInfoFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder( - int index) { - return getOpenInfoFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); - } - public java.util.List - getOpenInfoBuilderList() { - return getOpenInfoFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> - getOpenInfoFieldBuilder() { - if (openInfoBuilder_ == null) { - openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>( - openInfo_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - openInfo_ = null; - } - return openInfoBuilder_; - } - - // @@protoc_insertion_point(builder_scope:OpenRegionRequest) - } - - static { - defaultInstance = new OpenRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OpenRegionRequest) - } - - public interface OpenRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; - java.util.List getOpeningStateList(); - int getOpeningStateCount(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); - } - public static final class OpenRegionResponse extends - com.google.protobuf.GeneratedMessage - implements OpenRegionResponseOrBuilder { - // Use OpenRegionResponse.newBuilder() to construct. 
- private OpenRegionResponse(Builder builder) { - super(builder); - } - private OpenRegionResponse(boolean noInit) {} - - private static final OpenRegionResponse defaultInstance; - public static OpenRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public OpenRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; - } - - public enum RegionOpeningState - implements com.google.protobuf.ProtocolMessageEnum { - OPENED(0, 0), - ALREADY_OPENED(1, 1), - FAILED_OPENING(2, 2), - ; - - public static final int OPENED_VALUE = 0; - public static final int ALREADY_OPENED_VALUE = 1; - public static final int FAILED_OPENING_VALUE = 2; - - - public final int getNumber() { return value; } - - public static RegionOpeningState valueOf(int value) { - switch (value) { - case 0: return OPENED; - case 1: return ALREADY_OPENED; - case 2: return FAILED_OPENING; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public RegionOpeningState findValueByNumber(int number) { - return RegionOpeningState.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); - } - - private static final RegionOpeningState[] VALUES = { - OPENED, ALREADY_OPENED, FAILED_OPENING, - }; - - public static RegionOpeningState valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private RegionOpeningState(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) - } - - // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; - public static final int OPENINGSTATE_FIELD_NUMBER = 1; - private java.util.List openingState_; - public java.util.List getOpeningStateList() { - return openingState_; - } - public int getOpeningStateCount() { - return openingState_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { - return openingState_.get(index); - } - - private void initFields() { - openingState_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if 
(isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < openingState_.size(); i++) { - output.writeEnum(1, openingState_.get(i).getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < openingState_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeEnumSizeNoTag(openingState_.get(i).getNumber()); - } - size += dataSize; - size += 1 * openingState_.size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj; - - boolean result = true; - result = result && getOpeningStateList() - .equals(other.getOpeningStateList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getOpeningStateCount() > 0) { - hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnumList(getOpeningStateList()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( - java.io.InputStream 
input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - openingState_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = java.util.Collections.unmodifiableList(openingState_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.openingState_ = openingState_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this; - if (!other.openingState_.isEmpty()) { - if (openingState_.isEmpty()) { - openingState_ = other.openingState_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureOpeningStateIsMutable(); - openingState_.addAll(other.openingState_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addOpeningState(value); - } - break; - } - case 10: { - int length = input.readRawVarint32(); - int oldLimit = 
input.pushLimit(length); - while(input.getBytesUntilLimit() > 0) { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addOpeningState(value); - } - } - input.popLimit(oldLimit); - break; - } - } - } - } - - private int bitField0_; - - // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; - private java.util.List openingState_ = - java.util.Collections.emptyList(); - private void ensureOpeningStateIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = new java.util.ArrayList(openingState_); - bitField0_ |= 0x00000001; - } - } - public java.util.List getOpeningStateList() { - return java.util.Collections.unmodifiableList(openingState_); - } - public int getOpeningStateCount() { - return openingState_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { - return openingState_.get(index); - } - public Builder setOpeningState( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpeningStateIsMutable(); - openingState_.set(index, value); - onChanged(); - return this; - } - public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpeningStateIsMutable(); - openingState_.add(value); - onChanged(); - return this; - } - public Builder addAllOpeningState( - java.lang.Iterable values) { - ensureOpeningStateIsMutable(); - super.addAll(values, openingState_); - onChanged(); - return this; - } - public Builder clearOpeningState() { - openingState_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:OpenRegionResponse) - } - - static { - defaultInstance = new OpenRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OpenRegionResponse) - } - - public interface CloseRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional uint32 versionOfClosingNode = 2; - boolean hasVersionOfClosingNode(); - int getVersionOfClosingNode(); - - // optional bool transitionInZK = 3 [default = true]; - boolean hasTransitionInZK(); - boolean getTransitionInZK(); - - // optional .ServerName destinationServer = 4; - boolean hasDestinationServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder(); - } - public static final class CloseRegionRequest extends - com.google.protobuf.GeneratedMessage - implements CloseRegionRequestOrBuilder { - // Use CloseRegionRequest.newBuilder() to construct. 
- private CloseRegionRequest(Builder builder) { - super(builder); - } - private CloseRegionRequest(boolean noInit) {} - - private static final CloseRegionRequest defaultInstance; - public static CloseRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public CloseRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional uint32 versionOfClosingNode = 2; - public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2; - private int versionOfClosingNode_; - public boolean hasVersionOfClosingNode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getVersionOfClosingNode() { - return versionOfClosingNode_; - } - - // optional bool transitionInZK = 3 [default = true]; - public static final int TRANSITIONINZK_FIELD_NUMBER = 3; - private boolean transitionInZK_; - public boolean hasTransitionInZK() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getTransitionInZK() { - return transitionInZK_; - } - - // optional .ServerName destinationServer = 4; - public static final int DESTINATIONSERVER_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_; - public boolean hasDestinationServer() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { - return destinationServer_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { - return destinationServer_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - versionOfClosingNode_ = 0; - transitionInZK_ = true; - destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (hasDestinationServer()) { - if (!getDestinationServer().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 
0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, versionOfClosingNode_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBool(3, transitionInZK_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, destinationServer_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, versionOfClosingNode_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, transitionInZK_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, destinationServer_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode()); - if (hasVersionOfClosingNode()) { - result = result && (getVersionOfClosingNode() - == other.getVersionOfClosingNode()); - } - result = result && (hasTransitionInZK() == other.hasTransitionInZK()); - if (hasTransitionInZK()) { - result = result && (getTransitionInZK() - == other.getTransitionInZK()); - } - result = result && (hasDestinationServer() == other.hasDestinationServer()); - if (hasDestinationServer()) { - result = result && getDestinationServer() - .equals(other.getDestinationServer()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasVersionOfClosingNode()) { - hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER; - hash = (53 * hash) + getVersionOfClosingNode(); - } - if (hasTransitionInZK()) { - hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getTransitionInZK()); - } - if (hasDestinationServer()) { - hash = (37 * hash) + DESTINATIONSERVER_FIELD_NUMBER; - hash = (53 * hash) + getDestinationServer().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getDestinationServerFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - versionOfClosingNode_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - transitionInZK_ = true; - bitField0_ = (bitField0_ & ~0x00000004); - if (destinationServerBuilder_ == null) { - destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - destinationServerBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 
0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.versionOfClosingNode_ = versionOfClosingNode_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.transitionInZK_ = transitionInZK_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - if (destinationServerBuilder_ == null) { - result.destinationServer_ = destinationServer_; - } else { - result.destinationServer_ = destinationServerBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasVersionOfClosingNode()) { - setVersionOfClosingNode(other.getVersionOfClosingNode()); - } - if (other.hasTransitionInZK()) { - setTransitionInZK(other.getTransitionInZK()); - } - if (other.hasDestinationServer()) { - mergeDestinationServer(other.getDestinationServer()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (hasDestinationServer()) { - if (!getDestinationServer().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionOfClosingNode_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - transitionInZK_ = input.readBool(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasDestinationServer()) { - subBuilder.mergeFrom(getDestinationServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setDestinationServer(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional uint32 versionOfClosingNode = 2; - private int 
versionOfClosingNode_ ; - public boolean hasVersionOfClosingNode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getVersionOfClosingNode() { - return versionOfClosingNode_; - } - public Builder setVersionOfClosingNode(int value) { - bitField0_ |= 0x00000002; - versionOfClosingNode_ = value; - onChanged(); - return this; - } - public Builder clearVersionOfClosingNode() { - bitField0_ = (bitField0_ & ~0x00000002); - versionOfClosingNode_ = 0; - onChanged(); - return this; - } - - // optional bool transitionInZK = 3 [default = true]; - private boolean transitionInZK_ = true; - public boolean hasTransitionInZK() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getTransitionInZK() { - return transitionInZK_; - } - public Builder setTransitionInZK(boolean value) { - bitField0_ |= 0x00000004; - transitionInZK_ = value; - onChanged(); - return this; - } - public Builder clearTransitionInZK() { - bitField0_ = (bitField0_ & ~0x00000004); - transitionInZK_ = true; - onChanged(); - return this; - } - - // optional .ServerName destinationServer = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_; - public boolean hasDestinationServer() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { - if (destinationServerBuilder_ == null) { - return destinationServer_; - } else { - return destinationServerBuilder_.getMessage(); - } - } - public Builder setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (destinationServerBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - destinationServer_ = value; - onChanged(); - } else { - destinationServerBuilder_.setMessage(value); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder setDestinationServer( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (destinationServerBuilder_ == null) { - destinationServer_ = builderForValue.build(); - onChanged(); - } else { - destinationServerBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (destinationServerBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008) && - destinationServer_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - destinationServer_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destinationServer_).mergeFrom(value).buildPartial(); - } else { - destinationServer_ = value; - } - onChanged(); - } else { - destinationServerBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder clearDestinationServer() { - if (destinationServerBuilder_ == null) { - destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - 
destinationServerBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestinationServerBuilder() { - bitField0_ |= 0x00000008; - onChanged(); - return getDestinationServerFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { - if (destinationServerBuilder_ != null) { - return destinationServerBuilder_.getMessageOrBuilder(); - } else { - return destinationServer_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getDestinationServerFieldBuilder() { - if (destinationServerBuilder_ == null) { - destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - destinationServer_, - getParentForChildren(), - isClean()); - destinationServer_ = null; - } - return destinationServerBuilder_; - } - - // @@protoc_insertion_point(builder_scope:CloseRegionRequest) - } - - static { - defaultInstance = new CloseRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CloseRegionRequest) - } - - public interface CloseRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool closed = 1; - boolean hasClosed(); - boolean getClosed(); - } - public static final class CloseRegionResponse extends - com.google.protobuf.GeneratedMessage - implements CloseRegionResponseOrBuilder { - // Use CloseRegionResponse.newBuilder() to construct. 
- private CloseRegionResponse(Builder builder) { - super(builder); - } - private CloseRegionResponse(boolean noInit) {} - - private static final CloseRegionResponse defaultInstance; - public static CloseRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public CloseRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool closed = 1; - public static final int CLOSED_FIELD_NUMBER = 1; - private boolean closed_; - public boolean hasClosed() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getClosed() { - return closed_; - } - - private void initFields() { - closed_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasClosed()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, closed_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, closed_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj; - - boolean result = true; - result = result && (hasClosed() == other.hasClosed()); - if (hasClosed()) { - result = result && (getClosed() - == other.getClosed()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasClosed()) { - hash = (37 * hash) + CLOSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getClosed()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public 
static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - closed_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.closed_ = closed_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this; - if (other.hasClosed()) { - setClosed(other.getClosed()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasClosed()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - closed_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool closed = 1; - private boolean closed_ ; - public boolean hasClosed() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getClosed() { - return closed_; - } - public Builder setClosed(boolean value) { - bitField0_ |= 0x00000001; - closed_ = value; - onChanged(); - return this; - } - public Builder clearClosed() { - bitField0_ = (bitField0_ & ~0x00000001); - closed_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CloseRegionResponse) - } - - static { - defaultInstance = new CloseRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CloseRegionResponse) - } - - public interface FlushRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional uint64 ifOlderThanTs = 2; - boolean hasIfOlderThanTs(); - long getIfOlderThanTs(); - } - public static final class FlushRegionRequest extends - com.google.protobuf.GeneratedMessage - implements FlushRegionRequestOrBuilder { - // Use FlushRegionRequest.newBuilder() to construct. 
- private FlushRegionRequest(Builder builder) { - super(builder); - } - private FlushRegionRequest(boolean noInit) {} - - private static final FlushRegionRequest defaultInstance; - public static FlushRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public FlushRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional uint64 ifOlderThanTs = 2; - public static final int IFOLDERTHANTS_FIELD_NUMBER = 2; - private long ifOlderThanTs_; - public boolean hasIfOlderThanTs() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getIfOlderThanTs() { - return ifOlderThanTs_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - ifOlderThanTs_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, ifOlderThanTs_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, ifOlderThanTs_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs()); - if (hasIfOlderThanTs()) { - result = result && (getIfOlderThanTs() - == other.getIfOlderThanTs()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasIfOlderThanTs()) { - hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getIfOlderThanTs()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - ifOlderThanTs_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - 
throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.ifOlderThanTs_ = ifOlderThanTs_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasIfOlderThanTs()) { - setIfOlderThanTs(other.getIfOlderThanTs()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - ifOlderThanTs_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 
0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional uint64 ifOlderThanTs = 2; - private long ifOlderThanTs_ ; - public boolean hasIfOlderThanTs() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getIfOlderThanTs() { - return ifOlderThanTs_; - } - public Builder setIfOlderThanTs(long value) { - bitField0_ |= 0x00000002; - ifOlderThanTs_ = value; - onChanged(); - return this; - } - public Builder clearIfOlderThanTs() { - bitField0_ = (bitField0_ & ~0x00000002); - ifOlderThanTs_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:FlushRegionRequest) - } - - static { - defaultInstance = new 
FlushRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FlushRegionRequest) - } - - public interface FlushRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 lastFlushTime = 1; - boolean hasLastFlushTime(); - long getLastFlushTime(); - - // optional bool flushed = 2; - boolean hasFlushed(); - boolean getFlushed(); - } - public static final class FlushRegionResponse extends - com.google.protobuf.GeneratedMessage - implements FlushRegionResponseOrBuilder { - // Use FlushRegionResponse.newBuilder() to construct. - private FlushRegionResponse(Builder builder) { - super(builder); - } - private FlushRegionResponse(boolean noInit) {} - - private static final FlushRegionResponse defaultInstance; - public static FlushRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public FlushRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 lastFlushTime = 1; - public static final int LASTFLUSHTIME_FIELD_NUMBER = 1; - private long lastFlushTime_; - public boolean hasLastFlushTime() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLastFlushTime() { - return lastFlushTime_; - } - - // optional bool flushed = 2; - public static final int FLUSHED_FIELD_NUMBER = 2; - private boolean flushed_; - public boolean hasFlushed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getFlushed() { - return flushed_; - } - - private void initFields() { - lastFlushTime_ = 0L; - flushed_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLastFlushTime()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, lastFlushTime_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, flushed_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, lastFlushTime_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, flushed_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { 
- return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj; - - boolean result = true; - result = result && (hasLastFlushTime() == other.hasLastFlushTime()); - if (hasLastFlushTime()) { - result = result && (getLastFlushTime() - == other.getLastFlushTime()); - } - result = result && (hasFlushed() == other.hasFlushed()); - if (hasFlushed()) { - result = result && (getFlushed() - == other.getFlushed()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLastFlushTime()) { - hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastFlushTime()); - } - if (hasFlushed()) { - hash = (37 * hash) + FLUSHED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getFlushed()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return 
builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lastFlushTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - flushed_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lastFlushTime_ = lastFlushTime_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.flushed_ = flushed_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this; - if (other.hasLastFlushTime()) { - setLastFlushTime(other.getLastFlushTime()); - } - if (other.hasFlushed()) { - setFlushed(other.getFlushed()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLastFlushTime()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lastFlushTime_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - flushed_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 lastFlushTime = 1; - private long lastFlushTime_ ; - public boolean hasLastFlushTime() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLastFlushTime() { - return lastFlushTime_; - } - public Builder setLastFlushTime(long value) { - bitField0_ |= 0x00000001; - lastFlushTime_ = value; - onChanged(); - return this; - } - public Builder clearLastFlushTime() { - bitField0_ = (bitField0_ & ~0x00000001); - lastFlushTime_ = 0L; - onChanged(); - return this; - } - - // optional bool flushed = 2; - private boolean flushed_ ; - public boolean hasFlushed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getFlushed() { - return flushed_; - } - public Builder setFlushed(boolean value) { - bitField0_ |= 0x00000002; - flushed_ = value; - onChanged(); - return this; - } - public Builder clearFlushed() { - bitField0_ = (bitField0_ & ~0x00000002); - flushed_ = false; - onChanged(); - return this; - } - - // 
@@protoc_insertion_point(builder_scope:FlushRegionResponse) - } - - static { - defaultInstance = new FlushRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FlushRegionResponse) - } - - public interface SplitRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional bytes splitPoint = 2; - boolean hasSplitPoint(); - com.google.protobuf.ByteString getSplitPoint(); - } - public static final class SplitRegionRequest extends - com.google.protobuf.GeneratedMessage - implements SplitRegionRequestOrBuilder { - // Use SplitRegionRequest.newBuilder() to construct. - private SplitRegionRequest(Builder builder) { - super(builder); - } - private SplitRegionRequest(boolean noInit) {} - - private static final SplitRegionRequest defaultInstance; - public static SplitRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public SplitRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional bytes splitPoint = 2; - public static final int SPLITPOINT_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString splitPoint_; - public boolean hasSplitPoint() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSplitPoint() { - return splitPoint_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - splitPoint_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, splitPoint_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int 
getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, splitPoint_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasSplitPoint() == other.hasSplitPoint()); - if (hasSplitPoint()) { - result = result && getSplitPoint() - .equals(other.getSplitPoint()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasSplitPoint()) { - hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER; - hash = (53 * hash) + getSplitPoint().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - splitPoint_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - 
getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.splitPoint_ = splitPoint_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasSplitPoint()) { - setSplitPoint(other.getSplitPoint()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - splitPoint_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional bytes splitPoint = 2; - private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasSplitPoint() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSplitPoint() { - return splitPoint_; - } - public Builder setSplitPoint(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - splitPoint_ = value; - onChanged(); - return this; - } - public Builder clearSplitPoint() { - bitField0_ = (bitField0_ & ~0x00000002); - splitPoint_ = getDefaultInstance().getSplitPoint(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SplitRegionRequest) - } - - static { - defaultInstance = new SplitRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SplitRegionRequest) - } - - public interface SplitRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class SplitRegionResponse extends - com.google.protobuf.GeneratedMessage - implements SplitRegionResponseOrBuilder { - // Use SplitRegionResponse.newBuilder() to construct. - private SplitRegionResponse(Builder builder) { - super(builder); - } - private SplitRegionResponse(boolean noInit) {} - - private static final SplitRegionResponse defaultInstance; - public static SplitRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public SplitRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return 
newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) { - if 
(other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:SplitRegionResponse) - } - - static { - defaultInstance = new SplitRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SplitRegionResponse) - } - - public interface CompactRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional bool major = 2; - boolean hasMajor(); - boolean getMajor(); - - // optional bytes family = 3; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - } - public static final class CompactRegionRequest extends - com.google.protobuf.GeneratedMessage - implements CompactRegionRequestOrBuilder { - // Use CompactRegionRequest.newBuilder() to construct. 
- private CompactRegionRequest(Builder builder) { - super(builder); - } - private CompactRegionRequest(boolean noInit) {} - - private static final CompactRegionRequest defaultInstance; - public static CompactRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public CompactRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional bool major = 2; - public static final int MAJOR_FIELD_NUMBER = 2; - private boolean major_; - public boolean hasMajor() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMajor() { - return major_; - } - - // optional bytes family = 3; - public static final int FAMILY_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - major_ = false; - family_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, major_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, family_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, major_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, family_); - } - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasMajor() == other.hasMajor()); - if (hasMajor()) { - result = result && (getMajor() - == other.getMajor()); - } - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasMajor()) { - hash = (37 * hash) + MAJOR_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMajor()); - } - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest 
parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - major_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDescriptor(); - } - 
- public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.major_ = major_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.family_ = family_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasMajor()) { - setMajor(other.getMajor()); - } - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - major_ = input.readBool(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - family_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() 
{ - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional bool major = 2; - private boolean major_ ; - public boolean hasMajor() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMajor() { - return major_; - } - public Builder setMajor(boolean value) { - bitField0_ |= 0x00000002; - major_ = value; - onChanged(); - return this; - } - public Builder clearMajor() { - bitField0_ = (bitField0_ & ~0x00000002); - major_ = false; - onChanged(); - return this; - } - - // optional bytes family = 3; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000004); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CompactRegionRequest) - } - - static { - defaultInstance = new CompactRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CompactRegionRequest) - } - - public interface CompactRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class CompactRegionResponse extends - com.google.protobuf.GeneratedMessage - implements CompactRegionResponseOrBuilder { - // Use CompactRegionResponse.newBuilder() to construct. 
- private CompactRegionResponse(Builder builder) { - super(builder); - } - private CompactRegionResponse(boolean noInit) {} - - private static final CompactRegionResponse defaultInstance; - public static CompactRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public CompactRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - 
super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:CompactRegionResponse) - } - - static { - defaultInstance = new CompactRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CompactRegionResponse) - } - - public interface UUIDOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 leastSigBits = 1; - boolean hasLeastSigBits(); - long getLeastSigBits(); - - // required uint64 mostSigBits = 2; - boolean hasMostSigBits(); - long getMostSigBits(); - } - public static final class UUID extends - com.google.protobuf.GeneratedMessage - implements 
UUIDOrBuilder { - // Use UUID.newBuilder() to construct. - private UUID(Builder builder) { - super(builder); - } - private UUID(boolean noInit) {} - - private static final UUID defaultInstance; - public static UUID getDefaultInstance() { - return defaultInstance; - } - - public UUID getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 leastSigBits = 1; - public static final int LEASTSIGBITS_FIELD_NUMBER = 1; - private long leastSigBits_; - public boolean hasLeastSigBits() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLeastSigBits() { - return leastSigBits_; - } - - // required uint64 mostSigBits = 2; - public static final int MOSTSIGBITS_FIELD_NUMBER = 2; - private long mostSigBits_; - public boolean hasMostSigBits() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getMostSigBits() { - return mostSigBits_; - } - - private void initFields() { - leastSigBits_ = 0L; - mostSigBits_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLeastSigBits()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMostSigBits()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, leastSigBits_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, mostSigBits_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, leastSigBits_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, mostSigBits_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj; - - boolean result = true; - result = result && (hasLeastSigBits() == other.hasLeastSigBits()); - if (hasLeastSigBits()) { - result = result && (getLeastSigBits() - == other.getLeastSigBits()); - } - result = result && (hasMostSigBits() == other.hasMostSigBits()); - if 
(hasMostSigBits()) { - result = result && (getMostSigBits() - == other.getMostSigBits()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLeastSigBits()) { - hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLeastSigBits()); - } - if (hasMostSigBits()) { - hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMostSigBits()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder 
newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - leastSigBits_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - mostSigBits_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.leastSigBits_ = leastSigBits_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.mostSigBits_ = mostSigBits_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) { - 
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this; - if (other.hasLeastSigBits()) { - setLeastSigBits(other.getLeastSigBits()); - } - if (other.hasMostSigBits()) { - setMostSigBits(other.getMostSigBits()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLeastSigBits()) { - - return false; - } - if (!hasMostSigBits()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - leastSigBits_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - mostSigBits_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 leastSigBits = 1; - private long leastSigBits_ ; - public boolean hasLeastSigBits() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLeastSigBits() { - return leastSigBits_; - } - public Builder setLeastSigBits(long value) { - bitField0_ |= 0x00000001; - leastSigBits_ = value; - onChanged(); - return this; - } - public Builder clearLeastSigBits() { - bitField0_ = (bitField0_ & ~0x00000001); - leastSigBits_ = 0L; - onChanged(); - return this; - } - - // required uint64 mostSigBits = 2; - private long mostSigBits_ ; - public boolean hasMostSigBits() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getMostSigBits() { - return mostSigBits_; - } - public Builder setMostSigBits(long value) { - bitField0_ |= 0x00000002; - mostSigBits_ = value; - onChanged(); - return this; - } - public Builder clearMostSigBits() { - bitField0_ = (bitField0_ & ~0x00000002); - mostSigBits_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:UUID) - } - - static { - defaultInstance = new UUID(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UUID) - } - - public interface WALEntryOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .WALEntry.WALKey key = 1; - boolean hasKey(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder(); - - // required .WALEntry.WALEdit edit = 2; - boolean hasEdit(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); - } - public static final class WALEntry extends - com.google.protobuf.GeneratedMessage - implements WALEntryOrBuilder { - // Use WALEntry.newBuilder() to construct. 
- private WALEntry(Builder builder) { - super(builder); - } - private WALEntry(boolean noInit) {} - - private static final WALEntry defaultInstance; - public static WALEntry getDefaultInstance() { - return defaultInstance; - } - - public WALEntry getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; - } - - public interface WALKeyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes encodedRegionName = 1; - boolean hasEncodedRegionName(); - com.google.protobuf.ByteString getEncodedRegionName(); - - // required bytes tableName = 2; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required uint64 logSequenceNumber = 3; - boolean hasLogSequenceNumber(); - long getLogSequenceNumber(); - - // required uint64 writeTime = 4; - boolean hasWriteTime(); - long getWriteTime(); - - // optional .UUID clusterId = 5; - boolean hasClusterId(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); - } - public static final class WALKey extends - com.google.protobuf.GeneratedMessage - implements WALKeyOrBuilder { - // Use WALKey.newBuilder() to construct. - private WALKey(Builder builder) { - super(builder); - } - private WALKey(boolean noInit) {} - - private static final WALKey defaultInstance; - public static WALKey getDefaultInstance() { - return defaultInstance; - } - - public WALKey getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; - } - - private int bitField0_; - // required bytes encodedRegionName = 1; - public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString encodedRegionName_; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - - // required bytes tableName = 2; - public static final int TABLENAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required uint64 logSequenceNumber = 3; - public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; - private long logSequenceNumber_; - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - - // required uint64 writeTime = 4; - public static final int WRITETIME_FIELD_NUMBER = 4; - private long writeTime_; - public boolean hasWriteTime() { - return 
((bitField0_ & 0x00000008) == 0x00000008); - } - public long getWriteTime() { - return writeTime_; - } - - // optional .UUID clusterId = 5; - public static final int CLUSTERID_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { - return clusterId_; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { - return clusterId_; - } - - private void initFields() { - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - tableName_ = com.google.protobuf.ByteString.EMPTY; - logSequenceNumber_ = 0L; - writeTime_ = 0L; - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasEncodedRegionName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasLogSequenceNumber()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasWriteTime()) { - memoizedIsInitialized = 0; - return false; - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, encodedRegionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, logSequenceNumber_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt64(4, writeTime_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, clusterId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, encodedRegionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, logSequenceNumber_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, writeTime_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, clusterId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj; - - boolean result = true; - result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); - if (hasEncodedRegionName()) { - result = result && getEncodedRegionName() - .equals(other.getEncodedRegionName()); - } - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); - if (hasLogSequenceNumber()) { - result = result && (getLogSequenceNumber() - == other.getLogSequenceNumber()); - } - result = result && (hasWriteTime() == other.hasWriteTime()); - if (hasWriteTime()) { - result = result && (getWriteTime() - == other.getWriteTime()); - } - result = result && (hasClusterId() == other.hasClusterId()); - if (hasClusterId()) { - result = result && getClusterId() - .equals(other.getClusterId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEncodedRegionName()) { - hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getEncodedRegionName().hashCode(); - } - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasLogSequenceNumber()) { - hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogSequenceNumber()); - } - if (hasWriteTime()) { - hash = (37 * hash) + WRITETIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getWriteTime()); - } - if (hasClusterId()) { - hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; - hash = (53 * hash) + getClusterId().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getClusterIdFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - logSequenceNumber_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - writeTime_ = 0L; - bitField0_ = (bitField0_ & 
~0x00000008); - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.encodedRegionName_ = encodedRegionName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.logSequenceNumber_ = logSequenceNumber_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.writeTime_ = writeTime_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - if (clusterIdBuilder_ == null) { - result.clusterId_ = clusterId_; - } else { - result.clusterId_ = clusterIdBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; - if (other.hasEncodedRegionName()) { - setEncodedRegionName(other.getEncodedRegionName()); - } - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasLogSequenceNumber()) { - setLogSequenceNumber(other.getLogSequenceNumber()); - } - if (other.hasWriteTime()) { - setWriteTime(other.getWriteTime()); - } - if (other.hasClusterId()) { - mergeClusterId(other.getClusterId()); - } - 
this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasEncodedRegionName()) { - - return false; - } - if (!hasTableName()) { - - return false; - } - if (!hasLogSequenceNumber()) { - - return false; - } - if (!hasWriteTime()) { - - return false; - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - encodedRegionName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - logSequenceNumber_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - writeTime_ = input.readUInt64(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(); - if (hasClusterId()) { - subBuilder.mergeFrom(getClusterId()); - } - input.readMessage(subBuilder, extensionRegistry); - setClusterId(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes encodedRegionName = 1; - private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - encodedRegionName_ = value; - onChanged(); - return this; - } - public Builder clearEncodedRegionName() { - bitField0_ = (bitField0_ & ~0x00000001); - encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); - onChanged(); - return this; - } - - // required bytes tableName = 2; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000002); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required uint64 logSequenceNumber = 3; - private long logSequenceNumber_ ; - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - public Builder 
setLogSequenceNumber(long value) { - bitField0_ |= 0x00000004; - logSequenceNumber_ = value; - onChanged(); - return this; - } - public Builder clearLogSequenceNumber() { - bitField0_ = (bitField0_ & ~0x00000004); - logSequenceNumber_ = 0L; - onChanged(); - return this; - } - - // required uint64 writeTime = 4; - private long writeTime_ ; - public boolean hasWriteTime() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getWriteTime() { - return writeTime_; - } - public Builder setWriteTime(long value) { - bitField0_ |= 0x00000008; - writeTime_ = value; - onChanged(); - return this; - } - public Builder clearWriteTime() { - bitField0_ = (bitField0_ & ~0x00000008); - writeTime_ = 0L; - onChanged(); - return this; - } - - // optional .UUID clusterId = 5; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { - if (clusterIdBuilder_ == null) { - return clusterId_; - } else { - return clusterIdBuilder_.getMessage(); - } - } - public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { - if (clusterIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - clusterId_ = value; - onChanged(); - } else { - clusterIdBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setClusterId( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) { - if (clusterIdBuilder_ == null) { - clusterId_ = builderForValue.build(); - onChanged(); - } else { - clusterIdBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { - if (clusterIdBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - clusterId_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) { - clusterId_ = - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); - } else { - clusterId_ = value; - } - onChanged(); - } else { - clusterIdBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearClusterId() { - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - onChanged(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getClusterIdFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { - if (clusterIdBuilder_ != null) { - return clusterIdBuilder_.getMessageOrBuilder(); - } else { - return clusterId_; - } - } - private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> - getClusterIdFieldBuilder() { - if (clusterIdBuilder_ == null) { - clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>( - clusterId_, - getParentForChildren(), - isClean()); - clusterId_ = null; - } - return clusterIdBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) - } - - static { - defaultInstance = new WALKey(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALKey) - } - - public interface WALEditOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes keyValueBytes = 1; - java.util.List getKeyValueBytesList(); - int getKeyValueBytesCount(); - com.google.protobuf.ByteString getKeyValueBytes(int index); - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - java.util.List - getFamilyScopeList(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); - int getFamilyScopeCount(); - java.util.List - getFamilyScopeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index); - } - public static final class WALEdit extends - com.google.protobuf.GeneratedMessage - implements WALEditOrBuilder { - // Use WALEdit.newBuilder() to construct. - private WALEdit(Builder builder) { - super(builder); - } - private WALEdit(boolean noInit) {} - - private static final WALEdit defaultInstance; - public static WALEdit getDefaultInstance() { - return defaultInstance; - } - - public WALEdit getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; - } - - public enum ScopeType - implements com.google.protobuf.ProtocolMessageEnum { - REPLICATION_SCOPE_LOCAL(0, 0), - REPLICATION_SCOPE_GLOBAL(1, 1), - ; - - public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; - public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; - - - public final int getNumber() { return value; } - - public static ScopeType valueOf(int value) { - switch (value) { - case 0: return REPLICATION_SCOPE_LOCAL; - case 1: return REPLICATION_SCOPE_GLOBAL; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public ScopeType findValueByNumber(int number) { - return ScopeType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return 
getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); - } - - private static final ScopeType[] VALUES = { - REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, - }; - - public static ScopeType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private ScopeType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) - } - - public interface FamilyScopeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - boolean hasScopeType(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); - } - public static final class FamilyScope extends - com.google.protobuf.GeneratedMessage - implements FamilyScopeOrBuilder { - // Use FamilyScope.newBuilder() to construct. - private FamilyScope(Builder builder) { - super(builder); - } - private FamilyScope(boolean noInit) {} - - private static final FamilyScope defaultInstance; - public static FamilyScope getDefaultInstance() { - return defaultInstance; - } - - public FamilyScope getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - public static final int SCOPETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_; - public boolean hasScopeType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { - return scopeType_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasScopeType()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void 
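// Usage sketch (aside): FamilyScope's two fields are both required, so
// isInitialized() above only passes once family and scopeType are set;
// build() enforces the same check.
AdminProtos.WALEntry.WALEdit.FamilyScope cfScope =
    AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder()
        .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
        .setScopeType(AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_GLOBAL)
        .build();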
writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, scopeType_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, scopeType_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasScopeType() == other.hasScopeType()); - if (hasScopeType()) { - result = result && - (getScopeType() == other.getScopeType()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasScopeType()) { - hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getScopeType()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new 
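// Round-trip sketch (aside): the parseFrom overloads above accept a
// ByteString, byte[], InputStream, or CodedInputStream; any of them
// inverts serialization, e.g.:
com.google.protobuf.ByteString raw = cfScope.toByteString();
AdminProtos.WALEntry.WALEdit.FamilyScope reparsed =
    AdminProtos.WALEntry.WALEdit.FamilyScope.parseFrom(raw);
// reparsed.equals(cfScope) holds, per the generated equals() above.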
Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.scopeType_ = scopeType_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasScopeType()) { - setScopeType(other.getScopeType()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - if (!hasScopeType()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - 
-               this.getUnknownFields());
-         while (true) {
-           int tag = input.readTag();
-           switch (tag) {
-             case 0:
-               this.setUnknownFields(unknownFields.build());
-               onChanged();
-               return this;
-             default: {
-               if (!parseUnknownField(input, unknownFields,
-                                      extensionRegistry, tag)) {
-                 this.setUnknownFields(unknownFields.build());
-                 onChanged();
-                 return this;
-               }
-               break;
-             }
-             case 10: {
-               bitField0_ |= 0x00000001;
-               family_ = input.readBytes();
-               break;
-             }
-             case 16: {
-               int rawValue = input.readEnum();
-               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue);
-               if (value == null) {
-                 unknownFields.mergeVarintField(2, rawValue);
-               } else {
-                 bitField0_ |= 0x00000002;
-                 scopeType_ = value;
-               }
-               break;
-             }
-           }
-         }
-       }
-
-       private int bitField0_;
-
-       // required bytes family = 1;
-       private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
-       public boolean hasFamily() {
-         return ((bitField0_ & 0x00000001) == 0x00000001);
-       }
-       public com.google.protobuf.ByteString getFamily() {
-         return family_;
-       }
-       public Builder setFamily(com.google.protobuf.ByteString value) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         bitField0_ |= 0x00000001;
-         family_ = value;
-         onChanged();
-         return this;
-       }
-       public Builder clearFamily() {
-         bitField0_ = (bitField0_ & ~0x00000001);
-         family_ = getDefaultInstance().getFamily();
-         onChanged();
-         return this;
-       }
-
-       // required .WALEntry.WALEdit.ScopeType scopeType = 2;
-       private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL;
-       public boolean hasScopeType() {
-         return ((bitField0_ & 0x00000002) == 0x00000002);
-       }
-       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() {
-         return scopeType_;
-       }
-       public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         bitField0_ |= 0x00000002;
-         scopeType_ = value;
-         onChanged();
-         return this;
-       }
-       public Builder clearScopeType() {
-         bitField0_ = (bitField0_ & ~0x00000002);
-         scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL;
-         onChanged();
-         return this;
-       }
-
-       // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope)
-     }
-
-     static {
-       defaultInstance = new FamilyScope(true);
-       defaultInstance.initFields();
-     }
-
-     // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope)
-   }
-
-   // repeated bytes keyValueBytes = 1;
-   public static final int KEYVALUEBYTES_FIELD_NUMBER = 1;
-   private java.util.List<com.google.protobuf.ByteString> keyValueBytes_;
-   public java.util.List<com.google.protobuf.ByteString>
-       getKeyValueBytesList() {
-     return keyValueBytes_;
-   }
-   public int getKeyValueBytesCount() {
-     return keyValueBytes_.size();
-   }
-   public com.google.protobuf.ByteString getKeyValueBytes(int index) {
-     return keyValueBytes_.get(index);
-   }
-
-   // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2;
-   public static final int FAMILYSCOPE_FIELD_NUMBER = 2;
-   private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> familyScope_;
-   public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> getFamilyScopeList() {
-     return familyScope_;
-   }
-   public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>
-       getFamilyScopeOrBuilderList() {
-     return familyScope_;
-   }
-   public int getFamilyScopeCount() {
-     return
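// Note (aside): the tag constants in the mergeFrom(...) switch above come
// from the protobuf wire format, tag = (fieldNumber << 3) | wireType:
int familyTag    = (1 << 3) | 2;  // field 1, length-delimited -> "case 10"
int scopeTypeTag = (2 << 3) | 0;  // field 2, varint enum      -> "case 16"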
familyScope_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { - return familyScope_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index) { - return familyScope_.get(index); - } - - private void initFields() { - keyValueBytes_ = java.util.Collections.emptyList();; - familyScope_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getFamilyScopeCount(); i++) { - if (!getFamilyScope(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < keyValueBytes_.size(); i++) { - output.writeBytes(1, keyValueBytes_.get(i)); - } - for (int i = 0; i < familyScope_.size(); i++) { - output.writeMessage(2, familyScope_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < keyValueBytes_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(keyValueBytes_.get(i)); - } - size += dataSize; - size += 1 * getKeyValueBytesList().size(); - } - for (int i = 0; i < familyScope_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, familyScope_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj; - - boolean result = true; - result = result && getKeyValueBytesList() - .equals(other.getKeyValueBytesList()); - result = result && getFamilyScopeList() - .equals(other.getFamilyScopeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getKeyValueBytesCount() > 0) { - hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; - hash = (53 * hash) + getKeyValueBytesList().hashCode(); - } - if (getFamilyScopeCount() > 0) { - hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER; - hash = (53 * hash) + getFamilyScopeList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFamilyScopeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - keyValueBytes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - if (familyScopeBuilder_ == null) { - familyScope_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - familyScopeBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.keyValueBytes_ = keyValueBytes_; - if (familyScopeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = java.util.Collections.unmodifiableList(familyScope_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.familyScope_ = familyScope_; - } else { - result.familyScope_ = familyScopeBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; - if (!other.keyValueBytes_.isEmpty()) { - if (keyValueBytes_.isEmpty()) { - keyValueBytes_ = other.keyValueBytes_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureKeyValueBytesIsMutable(); - keyValueBytes_.addAll(other.keyValueBytes_); - } - onChanged(); - } - if (familyScopeBuilder_ == null) { - if (!other.familyScope_.isEmpty()) { - if (familyScope_.isEmpty()) { - familyScope_ = other.familyScope_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureFamilyScopeIsMutable(); - familyScope_.addAll(other.familyScope_); - } - onChanged(); - } - } else { - if (!other.familyScope_.isEmpty()) { - if (familyScopeBuilder_.isEmpty()) { - familyScopeBuilder_.dispose(); - familyScopeBuilder_ = null; - familyScope_ = other.familyScope_; - bitField0_ = (bitField0_ & ~0x00000002); - familyScopeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getFamilyScopeFieldBuilder() : null; - } else { - familyScopeBuilder_.addAllMessages(other.familyScope_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getFamilyScopeCount(); i++) { - if (!getFamilyScope(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureKeyValueBytesIsMutable(); - keyValueBytes_.add(input.readBytes()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyScope(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes keyValueBytes = 1; - private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; - private void ensureKeyValueBytesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getKeyValueBytesList() { - return java.util.Collections.unmodifiableList(keyValueBytes_); - } - public int getKeyValueBytesCount() { - return keyValueBytes_.size(); - } - public com.google.protobuf.ByteString getKeyValueBytes(int index) { - return keyValueBytes_.get(index); - } - public Builder setKeyValueBytes( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueBytesIsMutable(); - keyValueBytes_.set(index, value); - onChanged(); - return this; - } - public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { - 
if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueBytesIsMutable(); - keyValueBytes_.add(value); - onChanged(); - return this; - } - public Builder addAllKeyValueBytes( - java.lang.Iterable values) { - ensureKeyValueBytesIsMutable(); - super.addAll(values, keyValueBytes_); - onChanged(); - return this; - } - public Builder clearKeyValueBytes() { - keyValueBytes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - private java.util.List familyScope_ = - java.util.Collections.emptyList(); - private void ensureFamilyScopeIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = new java.util.ArrayList(familyScope_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; - - public java.util.List getFamilyScopeList() { - if (familyScopeBuilder_ == null) { - return java.util.Collections.unmodifiableList(familyScope_); - } else { - return familyScopeBuilder_.getMessageList(); - } - } - public int getFamilyScopeCount() { - if (familyScopeBuilder_ == null) { - return familyScope_.size(); - } else { - return familyScopeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { - if (familyScopeBuilder_ == null) { - return familyScope_.get(index); - } else { - return familyScopeBuilder_.getMessage(index); - } - } - public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.set(index, value); - onChanged(); - } else { - familyScopeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.set(index, builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.add(value); - onChanged(); - } else { - familyScopeBuilder_.addMessage(value); - } - return this; - } - public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.add(index, value); - onChanged(); - } else { - familyScopeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFamilyScope( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if 
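// Usage sketch (aside): populating WALEdit's repeated fields through the
// mutators above; kvBytes stands in for one serialized KeyValue.
byte[] kvBytes = new byte[0];  // placeholder payload
AdminProtos.WALEntry.WALEdit edit =
    AdminProtos.WALEntry.WALEdit.newBuilder()
        .addKeyValueBytes(com.google.protobuf.ByteString.copyFrom(kvBytes))
        .addFamilyScope(cfScope)  // FamilyScope from the earlier sketch
        .build();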
(familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.add(builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.add(index, builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllFamilyScope( - java.lang.Iterable values) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - super.addAll(values, familyScope_); - onChanged(); - } else { - familyScopeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFamilyScope() { - if (familyScopeBuilder_ == null) { - familyScope_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - familyScopeBuilder_.clear(); - } - return this; - } - public Builder removeFamilyScope(int index) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.remove(index); - onChanged(); - } else { - familyScopeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( - int index) { - return getFamilyScopeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index) { - if (familyScopeBuilder_ == null) { - return familyScope_.get(index); } else { - return familyScopeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFamilyScopeOrBuilderList() { - if (familyScopeBuilder_ != null) { - return familyScopeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(familyScope_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { - return getFamilyScopeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( - int index) { - return getFamilyScopeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); - } - public java.util.List - getFamilyScopeBuilderList() { - return getFamilyScopeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> - getFamilyScopeFieldBuilder() { - if (familyScopeBuilder_ == null) { - familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( - familyScope_, 
- ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - familyScope_ = null; - } - return familyScopeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) - } - - static { - defaultInstance = new WALEdit(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) - } - - private int bitField0_; - // required .WALEntry.WALKey key = 1; - public static final int KEY_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_; - public boolean hasKey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { - return key_; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { - return key_; - } - - // required .WALEntry.WALEdit edit = 2; - public static final int EDIT_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_; - public boolean hasEdit() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { - return edit_; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { - return edit_; - } - - private void initFields() { - key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasKey()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasEdit()) { - memoizedIsInitialized = 0; - return false; - } - if (!getKey().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getEdit().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, key_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, edit_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, key_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, edit_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) { - return super.equals(obj); - } - 
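// Note (aside): WALEntry's isInitialized() above checks hasKey() and
// hasEdit() and then recurses into both nested messages, so an entry only
// builds once the contained WALKey and WALEdit pass their own checks.
AdminProtos.WALEntry entry =
    AdminProtos.WALEntry.newBuilder()
        .setKey(key)     // via the setKey/setEdit mutators defined below
        .setEdit(edit)
        .buildPartial(); // build() would reject an incomplete nested key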
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj; - - boolean result = true; - result = result && (hasKey() == other.hasKey()); - if (hasKey()) { - result = result && getKey() - .equals(other.getKey()); - } - result = result && (hasEdit() == other.hasEdit()); - if (hasEdit()) { - result = result && getEdit() - .equals(other.getEdit()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasKey()) { - hash = (37 * hash) + KEY_FIELD_NUMBER; - hash = (53 * hash) + getKey().hashCode(); - } - if (hasEdit()) { - hash = (37 * hash) + EDIT_FIELD_NUMBER; - hash = (53 * hash) + getEdit().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry 
parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getKeyFieldBuilder(); - getEditFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - } else { - keyBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } else { - editBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (keyBuilder_ == null) { - result.key_ = key_; - } else { - result.key_ = keyBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (editBuilder_ == null) { - result.edit_ = edit_; - } else { - result.edit_ = editBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this; - if (other.hasKey()) { - mergeKey(other.getKey()); - } - if (other.hasEdit()) { - mergeEdit(other.getEdit()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasKey()) { - - return false; - } - if (!hasEdit()) { - - return false; - } - if (!getKey().isInitialized()) { - - return false; - } - if (!getEdit().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(); - if (hasKey()) { - subBuilder.mergeFrom(getKey()); - } - input.readMessage(subBuilder, extensionRegistry); - setKey(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(); - if (hasEdit()) { - subBuilder.mergeFrom(getEdit()); - } - input.readMessage(subBuilder, extensionRegistry); - setEdit(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .WALEntry.WALKey key = 1; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> keyBuilder_; - public boolean hasKey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { - if (keyBuilder_ == null) { - return key_; - } else { - return keyBuilder_.getMessage(); - } - } - public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { - if (keyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - key_ = value; - onChanged(); - } else { - keyBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setKey( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) { - if (keyBuilder_ == null) { - key_ = builderForValue.build(); - onChanged(); - } else { - keyBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { - if (keyBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - key_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) { - key_ = - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(key_).mergeFrom(value).buildPartial(); - } else { - key_ = value; - } - onChanged(); - } else { - keyBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearKey() { - if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - onChanged(); - } else { - keyBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getKeyBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getKeyFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { - if (keyBuilder_ != null) { - return keyBuilder_.getMessageOrBuilder(); - } else { - return key_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> - getKeyFieldBuilder() { - if (keyBuilder_ == null) { - keyBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>( - key_, - getParentForChildren(), - isClean()); - key_ = null; - } - return keyBuilder_; - } - - // required .WALEntry.WALEdit edit = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; - public boolean hasEdit() { - return 
-             ((bitField0_ & 0x00000002) == 0x00000002);
-       }
-       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() {
-         if (editBuilder_ == null) {
-           return edit_;
-         } else {
-           return editBuilder_.getMessage();
-         }
-       }
-       public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) {
-         if (editBuilder_ == null) {
-           if (value == null) {
-             throw new NullPointerException();
-           }
-           edit_ = value;
-           onChanged();
-         } else {
-           editBuilder_.setMessage(value);
-         }
-         bitField0_ |= 0x00000002;
-         return this;
-       }
-       public Builder setEdit(
-           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) {
-         if (editBuilder_ == null) {
-           edit_ = builderForValue.build();
-           onChanged();
-         } else {
-           editBuilder_.setMessage(builderForValue.build());
-         }
-         bitField0_ |= 0x00000002;
-         return this;
-       }
-       public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) {
-         if (editBuilder_ == null) {
-           if (((bitField0_ & 0x00000002) == 0x00000002) &&
-               edit_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) {
-             edit_ =
-               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial();
-           } else {
-             edit_ = value;
-           }
-           onChanged();
-         } else {
-           editBuilder_.mergeFrom(value);
-         }
-         bitField0_ |= 0x00000002;
-         return this;
-       }
-       public Builder clearEdit() {
-         if (editBuilder_ == null) {
-           edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance();
-           onChanged();
-         } else {
-           editBuilder_.clear();
-         }
-         bitField0_ = (bitField0_ & ~0x00000002);
-         return this;
-       }
-       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() {
-         bitField0_ |= 0x00000002;
-         onChanged();
-         return getEditFieldBuilder().getBuilder();
-       }
-       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() {
-         if (editBuilder_ != null) {
-           return editBuilder_.getMessageOrBuilder();
-         } else {
-           return edit_;
-         }
-       }
-       private com.google.protobuf.SingleFieldBuilder<
-           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>
-           getEditFieldBuilder() {
-         if (editBuilder_ == null) {
-           editBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-               org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>(
-                   edit_,
-                   getParentForChildren(),
-                   isClean());
-           edit_ = null;
-         }
-         return editBuilder_;
-       }
-
-       // @@protoc_insertion_point(builder_scope:WALEntry)
-     }
-
-     static {
-       defaultInstance = new WALEntry(true);
-       defaultInstance.initFields();
-     }
-
-     // @@protoc_insertion_point(class_scope:WALEntry)
-   }
-
-   public interface ReplicateWALEntryRequestOrBuilder
-       extends com.google.protobuf.MessageOrBuilder {
-
-     // repeated .WALEntry entry = 1;
-     java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry>
-         getEntryList();
-     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index);
-     int getEntryCount();
-     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>
-         getEntryOrBuilderList();
-     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
-         int index);
-   }
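// Usage sketch (aside): the ReplicateWALEntryRequest class defined next
// carries the repeated "entry" field from the interface above. Assuming it
// generates the usual repeated-field mutators (addEntry, by analogy with
// addFamilyScope earlier), a replication batch is assembled as:
AdminProtos.ReplicateWALEntryRequest request =
    AdminProtos.ReplicateWALEntryRequest.newBuilder()
        .addEntry(entry)  // WALEntry from the earlier sketch
        .build();
int wireSize = request.getSerializedSize();  // tag + length + payload per entry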
public static final class ReplicateWALEntryRequest extends - com.google.protobuf.GeneratedMessage - implements ReplicateWALEntryRequestOrBuilder { - // Use ReplicateWALEntryRequest.newBuilder() to construct. - private ReplicateWALEntryRequest(Builder builder) { - super(builder); - } - private ReplicateWALEntryRequest(boolean noInit) {} - - private static final ReplicateWALEntryRequest defaultInstance; - public static ReplicateWALEntryRequest getDefaultInstance() { - return defaultInstance; - } - - public ReplicateWALEntryRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; - } - - // repeated .WALEntry entry = 1; - public static final int ENTRY_FIELD_NUMBER = 1; - private java.util.List entry_; - public java.util.List getEntryList() { - return entry_; - } - public java.util.List - getEntryOrBuilderList() { - return entry_; - } - public int getEntryCount() { - return entry_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) { - return entry_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( - int index) { - return entry_.get(index); - } - - private void initFields() { - entry_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getEntryCount(); i++) { - if (!getEntry(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < entry_.size(); i++) { - output.writeMessage(1, entry_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < entry_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, entry_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj; - - boolean result = true; - result = result && getEntryList() - .equals(other.getEntryList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; 
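/*
 * Editorial note, not generated code: memoizedIsInitialized and
 * memoizedSerializedSize above use -1 as a "not yet computed" sentinel
 * and cache their result on first call. The caching is safe because a
 * built message is immutable. writeTo() deliberately calls
 * getSerializedSize() first, so the sizes of all nested messages are
 * memoized before any bytes are written.
 */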
- } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getEntryCount() > 0) { - hash = (37 * hash) + ENTRY_FIELD_NUMBER; - hash = (53 * hash) + getEntryList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
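/*
 * Editorial note, not generated code: unlike the parseFrom() variants
 * above, which throw on malformed input, parseDelimitedFrom() returns
 * null when the stream is already at end-of-file, because
 * mergeDelimitedFrom() reports "no message read" via its boolean result.
 * Callers looping over a stream of length-prefixed messages should treat
 * null as the normal termination signal, e.g.:
 *
 *   AdminProtos.ReplicateWALEntryRequest req;
 *   while ((req = AdminProtos.ReplicateWALEntryRequest
 *       .parseDelimitedFrom(in)) != null) {
 *     process(req);   // process() is a hypothetical consumer
 *   }
 */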
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getEntryFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (entryBuilder_ == null) { - entry_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - entryBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this); - int from_bitField0_ = bitField0_; - if (entryBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - entry_ = java.util.Collections.unmodifiableList(entry_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.entry_ = entry_; - } else { - 
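/*
 * Editorial note, not generated code: the Builder defines three build
 * paths. build() verifies isInitialized() and throws an unchecked
 * UninitializedMessageException on failure; buildPartial() skips the
 * check and may return a message missing required fields; and the
 * private buildParsed() wraps buildPartial() for the static parse
 * methods, converting an uninitialized result into a checked
 * InvalidProtocolBufferException, the appropriate failure mode for data
 * read off the wire.
 */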
result.entry_ = entryBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; - if (entryBuilder_ == null) { - if (!other.entry_.isEmpty()) { - if (entry_.isEmpty()) { - entry_ = other.entry_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureEntryIsMutable(); - entry_.addAll(other.entry_); - } - onChanged(); - } - } else { - if (!other.entry_.isEmpty()) { - if (entryBuilder_.isEmpty()) { - entryBuilder_.dispose(); - entryBuilder_ = null; - entry_ = other.entry_; - bitField0_ = (bitField0_ & ~0x00000001); - entryBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getEntryFieldBuilder() : null; - } else { - entryBuilder_.addAllMessages(other.entry_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getEntryCount(); i++) { - if (!getEntry(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addEntry(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .WALEntry entry = 1; - private java.util.List entry_ = - java.util.Collections.emptyList(); - private void ensureEntryIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - entry_ = new java.util.ArrayList(entry_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> entryBuilder_; - - public java.util.List getEntryList() { - if (entryBuilder_ == null) { - return java.util.Collections.unmodifiableList(entry_); - } else { - return entryBuilder_.getMessageList(); - } - } - public int getEntryCount() { - if (entryBuilder_ == null) { - return entry_.size(); - } else { - return entryBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) { - if (entryBuilder_ == null) { - return entry_.get(index); - 
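/*
 * Editorial note, not generated code: in the mergeFrom(CodedInputStream)
 * loop above, "case 10" is the wire tag of the repeated WALEntry field:
 * tag = (field_number << 3) | wire_type = (1 << 3) | 2 = 10, where wire
 * type 2 means length-delimited. Tag 0 marks end of input, and any
 * unrecognized tag is routed to parseUnknownField() so unknown fields
 * round-trip instead of being silently dropped.
 */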
} else { - return entryBuilder_.getMessage(index); - } - } - public Builder setEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { - if (entryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntryIsMutable(); - entry_.set(index, value); - onChanged(); - } else { - entryBuilder_.setMessage(index, value); - } - return this; - } - public Builder setEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { - if (entryBuilder_ == null) { - ensureEntryIsMutable(); - entry_.set(index, builderForValue.build()); - onChanged(); - } else { - entryBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { - if (entryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntryIsMutable(); - entry_.add(value); - onChanged(); - } else { - entryBuilder_.addMessage(value); - } - return this; - } - public Builder addEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { - if (entryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntryIsMutable(); - entry_.add(index, value); - onChanged(); - } else { - entryBuilder_.addMessage(index, value); - } - return this; - } - public Builder addEntry( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { - if (entryBuilder_ == null) { - ensureEntryIsMutable(); - entry_.add(builderForValue.build()); - onChanged(); - } else { - entryBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { - if (entryBuilder_ == null) { - ensureEntryIsMutable(); - entry_.add(index, builderForValue.build()); - onChanged(); - } else { - entryBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllEntry( - java.lang.Iterable values) { - if (entryBuilder_ == null) { - ensureEntryIsMutable(); - super.addAll(values, entry_); - onChanged(); - } else { - entryBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearEntry() { - if (entryBuilder_ == null) { - entry_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - entryBuilder_.clear(); - } - return this; - } - public Builder removeEntry(int index) { - if (entryBuilder_ == null) { - ensureEntryIsMutable(); - entry_.remove(index); - onChanged(); - } else { - entryBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getEntryBuilder( - int index) { - return getEntryFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( - int index) { - if (entryBuilder_ == null) { - return entry_.get(index); } else { - return entryBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getEntryOrBuilderList() { - if (entryBuilder_ != null) { - return entryBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(entry_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder() { - return getEntryFieldBuilder().addBuilder( - 
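/*
 * Editorial note, not generated code: the builder starts with an
 * immutable Collections.emptyList() and only copies into a fresh
 * ArrayList inside ensureEntryIsMutable(), gated by bit 0x00000001 of
 * bitField0_. buildPartial() clears that bit and hands the (now
 * unmodifiable) list to the message, so a later mutation through the
 * same builder triggers a fresh copy rather than corrupting the
 * already-built message.
 */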
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder( - int index) { - return getEntryFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); - } - public java.util.List - getEntryBuilderList() { - return getEntryFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> - getEntryFieldBuilder() { - if (entryBuilder_ == null) { - entryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>( - entry_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - entry_ = null; - } - return entryBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest) - } - - static { - defaultInstance = new ReplicateWALEntryRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest) - } - - public interface ReplicateWALEntryResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class ReplicateWALEntryResponse extends - com.google.protobuf.GeneratedMessage - implements ReplicateWALEntryResponseOrBuilder { - // Use ReplicateWALEntryResponse.newBuilder() to construct. - private ReplicateWALEntryResponse(Builder builder) { - super(builder); - } - private ReplicateWALEntryResponse(boolean noInit) {} - - private static final ReplicateWALEntryResponse defaultInstance; - public static ReplicateWALEntryResponse getDefaultInstance() { - return defaultInstance; - } - - public ReplicateWALEntryResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean 
equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if 
(other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse) - } - - static { - defaultInstance = new ReplicateWALEntryResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicateWALEntryResponse) - } - - public interface RollWALWriterRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class RollWALWriterRequest extends - com.google.protobuf.GeneratedMessage - implements RollWALWriterRequestOrBuilder { - // Use RollWALWriterRequest.newBuilder() to construct. 
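/*
 * Editorial note, not generated code: ReplicateWALEntryResponse above
 * and RollWALWriterRequest below declare no fields at all. Empty request
 * and response messages are still generated so that every RPC method has
 * a concrete message type, and fields can be added later without
 * breaking the method signature. Constructing one is a one-liner:
 *
 *   AdminProtos.RollWALWriterRequest req =
 *       AdminProtos.RollWALWriterRequest.newBuilder().build();
 */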
- private RollWALWriterRequest(Builder builder) { - super(builder); - } - private RollWALWriterRequest(boolean noInit) {} - - private static final RollWALWriterRequest defaultInstance; - public static RollWALWriterRequest getDefaultInstance() { - return defaultInstance; - } - - public RollWALWriterRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - 
return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:RollWALWriterRequest) - } - - static { - defaultInstance = new RollWALWriterRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RollWALWriterRequest) - } - - public interface RollWALWriterResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes regionToFlush = 1; - java.util.List getRegionToFlushList(); - int getRegionToFlushCount(); - com.google.protobuf.ByteString getRegionToFlush(int index); - } - public static final class RollWALWriterResponse extends - com.google.protobuf.GeneratedMessage - implements 
RollWALWriterResponseOrBuilder { - // Use RollWALWriterResponse.newBuilder() to construct. - private RollWALWriterResponse(Builder builder) { - super(builder); - } - private RollWALWriterResponse(boolean noInit) {} - - private static final RollWALWriterResponse defaultInstance; - public static RollWALWriterResponse getDefaultInstance() { - return defaultInstance; - } - - public RollWALWriterResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; - } - - // repeated bytes regionToFlush = 1; - public static final int REGIONTOFLUSH_FIELD_NUMBER = 1; - private java.util.List regionToFlush_; - public java.util.List - getRegionToFlushList() { - return regionToFlush_; - } - public int getRegionToFlushCount() { - return regionToFlush_.size(); - } - public com.google.protobuf.ByteString getRegionToFlush(int index) { - return regionToFlush_.get(index); - } - - private void initFields() { - regionToFlush_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < regionToFlush_.size(); i++) { - output.writeBytes(1, regionToFlush_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < regionToFlush_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(regionToFlush_.get(i)); - } - size += dataSize; - size += 1 * getRegionToFlushList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj; - - boolean result = true; - result = result && getRegionToFlushList() - .equals(other.getRegionToFlushList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRegionToFlushCount() > 0) { - hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER; - hash = (53 * hash) + getRegionToFlushList().hashCode(); - } - hash = (29 * hash) + 
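/*
 * Editorial note, not generated code: for the repeated bytes field
 * regionToFlush (field 1), getSerializedSize() above sums
 * computeBytesSizeNoTag() over the elements and then adds
 * 1 * list.size() for the tags: field 1 with wire type 2 encodes as the
 * single tag byte 0x0A = (1 << 3) | 2, so each element costs exactly one
 * extra byte of framing. Repeated bytes fields cannot use packed
 * encoding, hence one tag per element.
 */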
getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - 
return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - regionToFlush_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.regionToFlush_ = regionToFlush_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; - if (!other.regionToFlush_.isEmpty()) { - if (regionToFlush_.isEmpty()) { - regionToFlush_ = other.regionToFlush_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureRegionToFlushIsMutable(); - regionToFlush_.addAll(other.regionToFlush_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureRegionToFlushIsMutable(); - regionToFlush_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes regionToFlush = 1; - private java.util.List regionToFlush_ = java.util.Collections.emptyList();; - private void ensureRegionToFlushIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - regionToFlush_ = new java.util.ArrayList(regionToFlush_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getRegionToFlushList() { - return java.util.Collections.unmodifiableList(regionToFlush_); - } - public int getRegionToFlushCount() { - return regionToFlush_.size(); - } - public com.google.protobuf.ByteString getRegionToFlush(int index) { - return regionToFlush_.get(index); - } - public Builder setRegionToFlush( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionToFlushIsMutable(); - regionToFlush_.set(index, value); - onChanged(); - return this; - } - public Builder addRegionToFlush(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionToFlushIsMutable(); - regionToFlush_.add(value); - onChanged(); - return this; - } - public Builder addAllRegionToFlush( - java.lang.Iterable values) { - ensureRegionToFlushIsMutable(); - super.addAll(values, regionToFlush_); - onChanged(); - return this; - } - public Builder clearRegionToFlush() { - regionToFlush_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RollWALWriterResponse) - } - - static { - defaultInstance = new RollWALWriterResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RollWALWriterResponse) - } - - public interface StopServerRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string reason = 1; - boolean hasReason(); - String getReason(); - } - public static final class StopServerRequest extends - com.google.protobuf.GeneratedMessage - implements StopServerRequestOrBuilder { - // Use StopServerRequest.newBuilder() to construct. 
- private StopServerRequest(Builder builder) { - super(builder); - } - private StopServerRequest(boolean noInit) {} - - private static final StopServerRequest defaultInstance; - public static StopServerRequest getDefaultInstance() { - return defaultInstance; - } - - public StopServerRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; - } - - private int bitField0_; - // required string reason = 1; - public static final int REASON_FIELD_NUMBER = 1; - private java.lang.Object reason_; - public boolean hasReason() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getReason() { - java.lang.Object ref = reason_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - reason_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getReasonBytes() { - java.lang.Object ref = reason_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - reason_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - reason_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasReason()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getReasonBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getReasonBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj; - - boolean result = true; - result = result && (hasReason() == other.hasReason()); - if (hasReason()) { - result = result && getReason() - .equals(other.getReason()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - 
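/*
 * Editorial note, not generated code: reason is a required field, so
 * isInitialized() above returns false until it is set and build() throws
 * for an incomplete message. getReason() also shows the lazy string
 * pattern: reason_ holds either a decoded String or the raw ByteString
 * off the wire, and the decoded form is cached back into the field only
 * when the bytes are valid UTF-8. A minimal sketch (setReason() is
 * defined later in this class's Builder):
 *
 *   AdminProtos.StopServerRequest req =
 *       AdminProtos.StopServerRequest.newBuilder()
 *           .setReason("rolling restart")   // required string reason = 1;
 *           .build();                       // throws if reason is unset
 */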
return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasReason()) { - hash = (37 * hash) + REASON_FIELD_NUMBER; - hash = (53 * hash) + getReason().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) { - return 
newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - reason_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.reason_ = reason_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this; - if (other.hasReason()) { - setReason(other.getReason()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasReason()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - reason_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string reason = 1; - private java.lang.Object reason_ = ""; - public boolean hasReason() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getReason() { - java.lang.Object ref = reason_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - reason_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setReason(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - reason_ = value; - onChanged(); - return this; - } - public Builder clearReason() { - bitField0_ = (bitField0_ & ~0x00000001); - reason_ = getDefaultInstance().getReason(); - onChanged(); - return this; - } - void setReason(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - reason_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:StopServerRequest) - } - - static { - defaultInstance = new StopServerRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:StopServerRequest) - } - - public interface StopServerResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class StopServerResponse extends - com.google.protobuf.GeneratedMessage - implements StopServerResponseOrBuilder { - // Use StopServerResponse.newBuilder() to construct. 
- private StopServerResponse(Builder builder) { - super(builder); - } - private StopServerResponse(boolean noInit) {} - - private static final StopServerResponse defaultInstance; - public static StopServerResponse getDefaultInstance() { - return defaultInstance; - } - - public StopServerResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - 
return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:StopServerResponse) - } - - static { - defaultInstance = new StopServerResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:StopServerResponse) - } - - public interface GetServerInfoRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class GetServerInfoRequest extends - com.google.protobuf.GeneratedMessage - implements GetServerInfoRequestOrBuilder { - // Use GetServerInfoRequest.newBuilder() to construct. 
- private GetServerInfoRequest(Builder builder) { - super(builder); - } - private GetServerInfoRequest(boolean noInit) {} - - private static final GetServerInfoRequest defaultInstance; - public static GetServerInfoRequest getDefaultInstance() { - return defaultInstance; - } - - public GetServerInfoRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - 
return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:GetServerInfoRequest) - } - - static { - defaultInstance = new GetServerInfoRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetServerInfoRequest) - } - - public interface ServerInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName serverName = 1; - boolean hasServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - - // optional uint32 webuiPort = 2; - boolean 
hasWebuiPort(); - int getWebuiPort(); - } - public static final class ServerInfo extends - com.google.protobuf.GeneratedMessage - implements ServerInfoOrBuilder { - // Use ServerInfo.newBuilder() to construct. - private ServerInfo(Builder builder) { - super(builder); - } - private ServerInfo(boolean noInit) {} - - private static final ServerInfo defaultInstance; - public static ServerInfo getDefaultInstance() { - return defaultInstance; - } - - public ServerInfo getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName serverName = 1; - public static final int SERVERNAME_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; - } - - // optional uint32 webuiPort = 2; - public static final int WEBUIPORT_FIELD_NUMBER = 2; - private int webuiPort_; - public boolean hasWebuiPort() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getWebuiPort() { - return webuiPort_; - } - - private void initFields() { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - webuiPort_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServerName()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServerName().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, serverName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, webuiPort_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, serverName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, webuiPort_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) obj; - - boolean result = true; - result = result && (hasServerName() == other.hasServerName()); - if (hasServerName()) { - result = result && getServerName() - .equals(other.getServerName()); - } - result = result && (hasWebuiPort() == other.hasWebuiPort()); - if (hasWebuiPort()) { - result = result && (getWebuiPort() - == other.getWebuiPort()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServerName()) { - hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; - hash = (53 * hash) + getServerName().hashCode(); - } - if (hasWebuiPort()) { - hash = (37 * hash) + WEBUIPORT_FIELD_NUMBER; - hash = (53 * hash) + getWebuiPort(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo 
parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerNameFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - webuiPort_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); 
- } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverNameBuilder_ == null) { - result.serverName_ = serverName_; - } else { - result.serverName_ = serverNameBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.webuiPort_ = webuiPort_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) return this; - if (other.hasServerName()) { - mergeServerName(other.getServerName()); - } - if (other.hasWebuiPort()) { - setWebuiPort(other.getWebuiPort()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServerName()) { - - return false; - } - if (!getServerName().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - webuiPort_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName serverName = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - if (serverNameBuilder_ == null) { - 
return serverName_; - } else { - return serverNameBuilder_.getMessage(); - } - } - public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - serverName_ = value; - onChanged(); - } else { - serverNameBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServerName( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverNameBuilder_ == null) { - serverName_ = builderForValue.build(); - onChanged(); - } else { - serverNameBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - serverName_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); - } else { - serverName_ = value; - } - onChanged(); - } else { - serverNameBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServerName() { - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerNameFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - if (serverNameBuilder_ != null) { - return serverNameBuilder_.getMessageOrBuilder(); - } else { - return serverName_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerNameFieldBuilder() { - if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, - getParentForChildren(), - isClean()); - serverName_ = null; - } - return serverNameBuilder_; - } - - // optional uint32 webuiPort = 2; - private int webuiPort_ ; - public boolean hasWebuiPort() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getWebuiPort() { - return webuiPort_; - } - public Builder setWebuiPort(int value) { - bitField0_ |= 0x00000002; - webuiPort_ = value; - onChanged(); - return this; - } - public Builder clearWebuiPort() { - bitField0_ = (bitField0_ & ~0x00000002); - webuiPort_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ServerInfo) - } - - static { - defaultInstance = new ServerInfo(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ServerInfo) - } - - public interface 
GetServerInfoResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerInfo serverInfo = 1; - boolean hasServerInfo(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder(); - } - public static final class GetServerInfoResponse extends - com.google.protobuf.GeneratedMessage - implements GetServerInfoResponseOrBuilder { - // Use GetServerInfoResponse.newBuilder() to construct. - private GetServerInfoResponse(Builder builder) { - super(builder); - } - private GetServerInfoResponse(boolean noInit) {} - - private static final GetServerInfoResponse defaultInstance; - public static GetServerInfoResponse getDefaultInstance() { - return defaultInstance; - } - - public GetServerInfoResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerInfo serverInfo = 1; - public static final int SERVERINFO_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_; - public boolean hasServerInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { - return serverInfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { - return serverInfo_; - } - - private void initFields() { - serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServerInfo()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServerInfo().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, serverInfo_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, serverInfo_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)) { - return 
super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj; - - boolean result = true; - result = result && (hasServerInfo() == other.hasServerInfo()); - if (hasServerInfo()) { - result = result && getServerInfo() - .equals(other.getServerInfo()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServerInfo()) { - hash = (37 * hash) + SERVERINFO_FIELD_NUMBER; - hash = (53 * hash) + getServerInfo().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse 
parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverInfoBuilder_ == null) { - serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - } else { - serverInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() { - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverInfoBuilder_ == null) { - result.serverInfo_ = serverInfo_; - } else { - result.serverInfo_ = serverInfoBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; - if (other.hasServerInfo()) { - mergeServerInfo(other.getServerInfo()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServerInfo()) { - - return false; - } - if (!getServerInfo().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(); - if (hasServerInfo()) { - subBuilder.mergeFrom(getServerInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerInfo(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerInfo serverInfo = 1; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_; - public boolean hasServerInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { - if (serverInfoBuilder_ == null) { - return serverInfo_; - } else { - return serverInfoBuilder_.getMessage(); - } - } - public Builder setServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) { - if (serverInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - serverInfo_ = value; - onChanged(); - } else { - 
serverInfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServerInfo( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder builderForValue) { - if (serverInfoBuilder_ == null) { - serverInfo_ = builderForValue.build(); - onChanged(); - } else { - serverInfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) { - if (serverInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - serverInfo_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) { - serverInfo_ = - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(serverInfo_).mergeFrom(value).buildPartial(); - } else { - serverInfo_ = value; - } - onChanged(); - } else { - serverInfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServerInfo() { - if (serverInfoBuilder_ == null) { - serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - onChanged(); - } else { - serverInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder getServerInfoBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerInfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { - if (serverInfoBuilder_ != null) { - return serverInfoBuilder_.getMessageOrBuilder(); - } else { - return serverInfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> - getServerInfoFieldBuilder() { - if (serverInfoBuilder_ == null) { - serverInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder>( - serverInfo_, - getParentForChildren(), - isClean()); - serverInfo_ = null; - } - return serverInfoBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetServerInfoResponse) - } - - static { - defaultInstance = new GetServerInfoResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetServerInfoResponse) - } - - public static abstract class AdminService - implements com.google.protobuf.Service { - protected AdminService() {} - - public interface Interface { - public abstract void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getStoreFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done); 
- - public abstract void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new AdminService() { - @java.lang.Override - public void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done) { - impl.getRegionInfo(controller, request, done); - } - - @java.lang.Override - public void getStoreFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, - com.google.protobuf.RpcCallback done) { - impl.getStoreFile(controller, request, done); - } - - @java.lang.Override - public void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.getOnlineRegion(controller, request, done); - } - - @java.lang.Override - public void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.openRegion(controller, request, done); - } - - @java.lang.Override - public void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.closeRegion(controller, request, done); - } - - @java.lang.Override - public void flushRegion( - com.google.protobuf.RpcController 
controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.flushRegion(controller, request, done); - } - - @java.lang.Override - public void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.splitRegion(controller, request, done); - } - - @java.lang.Override - public void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.compactRegion(controller, request, done); - } - - @java.lang.Override - public void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done) { - impl.replicateWALEntry(controller, request, done); - } - - @java.lang.Override - public void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done) { - impl.rollWALWriter(controller, request, done); - } - - @java.lang.Override - public void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done) { - impl.getServerInfo(controller, request, done); - } - - @java.lang.Override - public void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done) { - impl.stopServer(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request); - case 1: - return impl.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request); - case 2: - return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request); - case 3: - return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request); - case 4: - return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request); - case 5: - return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request); - case 6: - return impl.splitRegion(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request); - case 7: - return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request); - case 8: - return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request); - case 9: - return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request); - case 10: - return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request); - case 11: - return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); - case 4: - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getStoreFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, - 
com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 5: - this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 7: - this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 8: - this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 9: - this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 10: - this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 11: - this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " 
+ - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance())); - } - - public void getStoreFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance())); - } - - public void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance())); - } - - public void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance())); - } - - public void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance())); - } - - public void flushRegion( 
- com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance())); - } - - public void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance())); - } - - public void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance())); - } - - public void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); - } - - public void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(9), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance())); - } - - public void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(10), - controller, - request, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance())); - } - - public void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(11), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( - 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(9), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()); - } - - - public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(10), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(11), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetRegionInfoRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetRegionInfoRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetRegionInfoResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetRegionInfoResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetStoreFileRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetStoreFileRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetStoreFileResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetStoreFileResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetOnlineRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetOnlineRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetOnlineRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetOnlineRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OpenRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OpenRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OpenRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OpenRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CloseRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_CloseRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CloseRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CloseRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FlushRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FlushRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FlushRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FlushRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SplitRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SplitRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SplitRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SplitRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CompactRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CompactRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CompactRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CompactRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UUID_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UUID_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALKey_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALKey_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALEdit_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALEdit_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicateWALEntryRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicateWALEntryRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicateWALEntryResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicateWALEntryResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RollWALWriterRequest_descriptor; - private 
static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RollWALWriterRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RollWALWriterResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RollWALWriterResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_StopServerRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_StopServerRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_StopServerResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_StopServerResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetServerInfoRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetServerInfoRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ServerInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ServerInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetServerInfoResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetServerInfoResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\013Admin.proto\032\013hbase.proto\"Q\n\024GetRegionI" + - "nfoRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + - "fier\022\027\n\017compactionState\030\002 \001(\010\"\301\001\n\025GetReg" + - "ionInfoResponse\022\037\n\nregionInfo\030\001 \002(\0132\013.Re" + - "gionInfo\022?\n\017compactionState\030\002 \001(\0162&.GetR" + - "egionInfoResponse.CompactionState\"F\n\017Com" + - "pactionState\022\010\n\004NONE\020\000\022\t\n\005MINOR\020\001\022\t\n\005MAJ" + - "OR\020\002\022\023\n\017MAJOR_AND_MINOR\020\003\"G\n\023GetStoreFil" + - "eRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + - "er\022\016\n\006family\030\002 \003(\014\")\n\024GetStoreFileRespon", - "se\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetOnlineRegion" + - "Request\":\n\027GetOnlineRegionResponse\022\037\n\nre" + - "gionInfo\030\001 \003(\0132\013.RegionInfo\"\225\001\n\021OpenRegi" + - "onRequest\0223\n\010openInfo\030\001 \003(\0132!.OpenRegion" + - "Request.RegionOpenInfo\032K\n\016RegionOpenInfo" + - "\022\033\n\006region\030\001 \002(\0132\013.RegionInfo\022\034\n\024version" + - "OfOfflineNode\030\002 \001(\r\"\234\001\n\022OpenRegionRespon" + - "se\022<\n\014openingState\030\001 \003(\0162&.OpenRegionRes" + - "ponse.RegionOpeningState\"H\n\022RegionOpenin" + - "gState\022\n\n\006OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022", - "\n\016FAILED_OPENING\020\002\"\232\001\n\022CloseRegionReques" + - "t\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024v" + - "ersionOfClosingNode\030\002 \001(\r\022\034\n\016transitionI" + - "nZK\030\003 \001(\010:\004true\022&\n\021destinationServer\030\004 \001" + - "(\0132\013.ServerName\"%\n\023CloseRegionResponse\022\016" + - 
"\n\006closed\030\001 \002(\010\"M\n\022FlushRegionRequest\022 \n\006" + - "region\030\001 \002(\0132\020.RegionSpecifier\022\025\n\rifOlde" + - "rThanTs\030\002 \001(\004\"=\n\023FlushRegionResponse\022\025\n\r" + - "lastFlushTime\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022" + - "SplitRegionRequest\022 \n\006region\030\001 \002(\0132\020.Reg", - "ionSpecifier\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023Spli" + - "tRegionResponse\"W\n\024CompactRegionRequest\022" + - " \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\r\n\005maj" + - "or\030\002 \001(\010\022\016\n\006family\030\003 \001(\014\"\027\n\025CompactRegio" + - "nResponse\"1\n\004UUID\022\024\n\014leastSigBits\030\001 \002(\004\022" + - "\023\n\013mostSigBits\030\002 \002(\004\"\270\003\n\010WALEntry\022\035\n\003key" + - "\030\001 \002(\0132\020.WALEntry.WALKey\022\037\n\004edit\030\002 \002(\0132\021" + - ".WALEntry.WALEdit\032~\n\006WALKey\022\031\n\021encodedRe" + - "gionName\030\001 \002(\014\022\021\n\ttableName\030\002 \002(\014\022\031\n\021log" + - "SequenceNumber\030\003 \002(\004\022\021\n\twriteTime\030\004 \002(\004\022", - "\030\n\tclusterId\030\005 \001(\0132\005.UUID\032\353\001\n\007WALEdit\022\025\n" + - "\rkeyValueBytes\030\001 \003(\014\0222\n\013familyScope\030\002 \003(" + - "\0132\035.WALEntry.WALEdit.FamilyScope\032M\n\013Fami" + - "lyScope\022\016\n\006family\030\001 \002(\014\022.\n\tscopeType\030\002 \002" + - "(\0162\033.WALEntry.WALEdit.ScopeType\"F\n\tScope" + - "Type\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030REP" + - "LICATION_SCOPE_GLOBAL\020\001\"4\n\030ReplicateWALE" + - "ntryRequest\022\030\n\005entry\030\001 \003(\0132\t.WALEntry\"\033\n" + - "\031ReplicateWALEntryResponse\"\026\n\024RollWALWri" + - "terRequest\".\n\025RollWALWriterResponse\022\025\n\rr", - "egionToFlush\030\001 \003(\014\"#\n\021StopServerRequest\022" + - "\016\n\006reason\030\001 \002(\t\"\024\n\022StopServerResponse\"\026\n" + - "\024GetServerInfoRequest\"@\n\nServerInfo\022\037\n\ns" + - "erverName\030\001 \002(\0132\013.ServerName\022\021\n\twebuiPor" + - "t\030\002 \001(\r\"8\n\025GetServerInfoResponse\022\037\n\nserv" + - "erInfo\030\001 \002(\0132\013.ServerInfo2\371\005\n\014AdminServi" + - "ce\022>\n\rgetRegionInfo\022\025.GetRegionInfoReque" + - "st\032\026.GetRegionInfoResponse\022;\n\014getStoreFi" + - "le\022\024.GetStoreFileRequest\032\025.GetStoreFileR" + - "esponse\022D\n\017getOnlineRegion\022\027.GetOnlineRe", - "gionRequest\032\030.GetOnlineRegionResponse\0225\n" + - "\nopenRegion\022\022.OpenRegionRequest\032\023.OpenRe" + - "gionResponse\0228\n\013closeRegion\022\023.CloseRegio" + - "nRequest\032\024.CloseRegionResponse\0228\n\013flushR" + - "egion\022\023.FlushRegionRequest\032\024.FlushRegion" + - "Response\0228\n\013splitRegion\022\023.SplitRegionReq" + - "uest\032\024.SplitRegionResponse\022>\n\rcompactReg" + - "ion\022\025.CompactRegionRequest\032\026.CompactRegi" + - "onResponse\022J\n\021replicateWALEntry\022\031.Replic" + - "ateWALEntryRequest\032\032.ReplicateWALEntryRe", - "sponse\022>\n\rrollWALWriter\022\025.RollWALWriterR" + - "equest\032\026.RollWALWriterResponse\022>\n\rgetSer" + - "verInfo\022\025.GetServerInfoRequest\032\026.GetServ" + - "erInfoResponse\0225\n\nstopServer\022\022.StopServe" + - "rRequest\032\023.StopServerResponseBA\n*org.apa" + - "che.hadoop.hbase.protobuf.generatedB\013Adm" + - "inProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_GetRegionInfoRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_GetRegionInfoRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetRegionInfoRequest_descriptor, - new java.lang.String[] { "Region", "CompactionState", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); - internal_static_GetRegionInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_GetRegionInfoResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetRegionInfoResponse_descriptor, - new java.lang.String[] { "RegionInfo", "CompactionState", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); - internal_static_GetStoreFileRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_GetStoreFileRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetStoreFileRequest_descriptor, - new java.lang.String[] { "Region", "Family", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class); - internal_static_GetStoreFileResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_GetStoreFileResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetStoreFileResponse_descriptor, - new java.lang.String[] { "StoreFile", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class); - internal_static_GetOnlineRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_GetOnlineRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetOnlineRegionRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class); - internal_static_GetOnlineRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_GetOnlineRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetOnlineRegionResponse_descriptor, - new java.lang.String[] { "RegionInfo", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); - internal_static_OpenRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_OpenRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OpenRegionRequest_descriptor, - new java.lang.String[] { "OpenInfo", }, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); - internal_static_OpenRegionRequest_RegionOpenInfo_descriptor = - internal_static_OpenRegionRequest_descriptor.getNestedTypes().get(0); - internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OpenRegionRequest_RegionOpenInfo_descriptor, - new java.lang.String[] { "Region", "VersionOfOfflineNode", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); - internal_static_OpenRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_OpenRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OpenRegionResponse_descriptor, - new java.lang.String[] { "OpeningState", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); - internal_static_CloseRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_CloseRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CloseRegionRequest_descriptor, - new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); - internal_static_CloseRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_CloseRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CloseRegionResponse_descriptor, - new java.lang.String[] { "Closed", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); - internal_static_FlushRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_FlushRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FlushRegionRequest_descriptor, - new java.lang.String[] { "Region", "IfOlderThanTs", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); - internal_static_FlushRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_FlushRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FlushRegionResponse_descriptor, - new java.lang.String[] { "LastFlushTime", "Flushed", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); - internal_static_SplitRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_SplitRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SplitRegionRequest_descriptor, - new 
java.lang.String[] { "Region", "SplitPoint", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); - internal_static_SplitRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_SplitRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SplitRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); - internal_static_CompactRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_CompactRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CompactRegionRequest_descriptor, - new java.lang.String[] { "Region", "Major", "Family", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); - internal_static_CompactRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_CompactRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CompactRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); - internal_static_UUID_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_UUID_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UUID_descriptor, - new java.lang.String[] { "LeastSigBits", "MostSigBits", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); - internal_static_WALEntry_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_WALEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_descriptor, - new java.lang.String[] { "Key", "Edit", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); - internal_static_WALEntry_WALKey_descriptor = - internal_static_WALEntry_descriptor.getNestedTypes().get(0); - internal_static_WALEntry_WALKey_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALKey_descriptor, - new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); - internal_static_WALEntry_WALEdit_descriptor = - internal_static_WALEntry_descriptor.getNestedTypes().get(1); - internal_static_WALEntry_WALEdit_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALEdit_descriptor, - new java.lang.String[] { "KeyValueBytes", "FamilyScope", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); - internal_static_WALEntry_WALEdit_FamilyScope_descriptor = - internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); - internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALEdit_FamilyScope_descriptor, - new java.lang.String[] { "Family", "ScopeType", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); - internal_static_ReplicateWALEntryRequest_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicateWALEntryRequest_descriptor, - new java.lang.String[] { "Entry", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); - internal_static_ReplicateWALEntryResponse_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicateWALEntryResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); - internal_static_RollWALWriterRequest_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_RollWALWriterRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RollWALWriterRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); - internal_static_RollWALWriterResponse_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_RollWALWriterResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RollWALWriterResponse_descriptor, - new java.lang.String[] { "RegionToFlush", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); - internal_static_StopServerRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_StopServerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_StopServerRequest_descriptor, - new java.lang.String[] { "Reason", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); - internal_static_StopServerResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_StopServerResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_StopServerResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); - internal_static_GetServerInfoRequest_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_GetServerInfoRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetServerInfoRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); - internal_static_ServerInfo_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_ServerInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ServerInfo_descriptor, - new java.lang.String[] { "ServerName", "WebuiPort", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class); - internal_static_GetServerInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_GetServerInfoResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetServerInfoResponse_descriptor, - new java.lang.String[] { "ServerInfo", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java deleted file mode 100644 index ad1dbeb..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java +++ /dev/null @@ -1,1829 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: Aggregate.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class AggregateProtos { - private AggregateProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface AggregateArgumentOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string interpreterClassName = 1; - boolean hasInterpreterClassName(); - String getInterpreterClassName(); - - // required .Scan scan = 2; - boolean hasScan(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - - // optional bytes interpreterSpecificBytes = 3; - boolean hasInterpreterSpecificBytes(); - com.google.protobuf.ByteString getInterpreterSpecificBytes(); - } - public static final class AggregateArgument extends - com.google.protobuf.GeneratedMessage - implements AggregateArgumentOrBuilder { - // Use AggregateArgument.newBuilder() to construct. 
- private AggregateArgument(Builder builder) { - super(builder); - } - private AggregateArgument(boolean noInit) {} - - private static final AggregateArgument defaultInstance; - public static AggregateArgument getDefaultInstance() { - return defaultInstance; - } - - public AggregateArgument getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable; - } - - private int bitField0_; - // required string interpreterClassName = 1; - public static final int INTERPRETERCLASSNAME_FIELD_NUMBER = 1; - private java.lang.Object interpreterClassName_; - public boolean hasInterpreterClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getInterpreterClassName() { - java.lang.Object ref = interpreterClassName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - interpreterClassName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getInterpreterClassNameBytes() { - java.lang.Object ref = interpreterClassName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - interpreterClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required .Scan scan = 2; - public static final int SCAN_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - return scan_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // optional bytes interpreterSpecificBytes = 3; - public static final int INTERPRETERSPECIFICBYTES_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString interpreterSpecificBytes_; - public boolean hasInterpreterSpecificBytes() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getInterpreterSpecificBytes() { - return interpreterSpecificBytes_; - } - - private void initFields() { - interpreterClassName_ = ""; - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasInterpreterClassName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasScan()) { - memoizedIsInitialized = 0; - return false; - } - if (!getScan().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 
0x00000001)) { - output.writeBytes(1, getInterpreterClassNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, interpreterSpecificBytes_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getInterpreterClassNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, interpreterSpecificBytes_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) obj; - - boolean result = true; - result = result && (hasInterpreterClassName() == other.hasInterpreterClassName()); - if (hasInterpreterClassName()) { - result = result && getInterpreterClassName() - .equals(other.getInterpreterClassName()); - } - result = result && (hasScan() == other.hasScan()); - if (hasScan()) { - result = result && getScan() - .equals(other.getScan()); - } - result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes()); - if (hasInterpreterSpecificBytes()) { - result = result && getInterpreterSpecificBytes() - .equals(other.getInterpreterSpecificBytes()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasInterpreterClassName()) { - hash = (37 * hash) + INTERPRETERCLASSNAME_FIELD_NUMBER; - hash = (53 * hash) + getInterpreterClassName().hashCode(); - } - if (hasScan()) { - hash = (37 * hash) + SCAN_FIELD_NUMBER; - hash = (53 * hash) + getScan().hashCode(); - } - if (hasInterpreterSpecificBytes()) { - hash = (37 * hash) + INTERPRETERSPECIFICBYTES_FIELD_NUMBER; - hash = (53 * hash) + getInterpreterSpecificBytes().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgumentOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getScanFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - interpreterClassName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument build() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.interpreterClassName_ = interpreterClassName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (scanBuilder_ == null) { - result.scan_ = scan_; - } else { - result.scan_ = scanBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.interpreterSpecificBytes_ = interpreterSpecificBytes_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance()) return this; - if (other.hasInterpreterClassName()) { - setInterpreterClassName(other.getInterpreterClassName()); - } - if (other.hasScan()) { - mergeScan(other.getScan()); - } - if (other.hasInterpreterSpecificBytes()) { - setInterpreterSpecificBytes(other.getInterpreterSpecificBytes()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasInterpreterClassName()) { - - return false; - } - if (!hasScan()) { - - return false; - } - if (!getScan().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - interpreterClassName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); - if (hasScan()) { - subBuilder.mergeFrom(getScan()); - } - input.readMessage(subBuilder, extensionRegistry); - setScan(subBuilder.buildPartial()); - break; - } - case 26: { - bitField0_ |= 0x00000004; - interpreterSpecificBytes_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string interpreterClassName = 1; - private java.lang.Object interpreterClassName_ = ""; - public boolean hasInterpreterClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getInterpreterClassName() { - java.lang.Object ref = interpreterClassName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - interpreterClassName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setInterpreterClassName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - interpreterClassName_ = value; - onChanged(); - return this; - } - public Builder clearInterpreterClassName() { - bitField0_ = (bitField0_ & ~0x00000001); - interpreterClassName_ = getDefaultInstance().getInterpreterClassName(); - onChanged(); - return this; - } - void setInterpreterClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - interpreterClassName_ = value; - onChanged(); - } - - // required .Scan scan = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; - public boolean hasScan() { 
- return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - if (scanBuilder_ == null) { - return scan_; - } else { - return scanBuilder_.getMessage(); - } - } - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - scan_ = value; - onChanged(); - } else { - scanBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { - if (scanBuilder_ == null) { - scan_ = builderForValue.build(); - onChanged(); - } else { - scanBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { - scan_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); - } else { - scan_ = value; - } - onChanged(); - } else { - scanBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearScan() { - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - onChanged(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getScanFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - if (scanBuilder_ != null) { - return scanBuilder_.getMessageOrBuilder(); - } else { - return scan_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> - getScanFieldBuilder() { - if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( - scan_, - getParentForChildren(), - isClean()); - scan_ = null; - } - return scanBuilder_; - } - - // optional bytes interpreterSpecificBytes = 3; - private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasInterpreterSpecificBytes() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getInterpreterSpecificBytes() { - return interpreterSpecificBytes_; - } - public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - interpreterSpecificBytes_ = value; - onChanged(); - return this; - } - public Builder clearInterpreterSpecificBytes() { - bitField0_ = (bitField0_ & ~0x00000004); - interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes(); - 
onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:AggregateArgument) - } - - static { - defaultInstance = new AggregateArgument(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AggregateArgument) - } - - public interface AggregateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes firstPart = 1; - java.util.List getFirstPartList(); - int getFirstPartCount(); - com.google.protobuf.ByteString getFirstPart(int index); - - // optional bytes secondPart = 2; - boolean hasSecondPart(); - com.google.protobuf.ByteString getSecondPart(); - } - public static final class AggregateResponse extends - com.google.protobuf.GeneratedMessage - implements AggregateResponseOrBuilder { - // Use AggregateResponse.newBuilder() to construct. - private AggregateResponse(Builder builder) { - super(builder); - } - private AggregateResponse(boolean noInit) {} - - private static final AggregateResponse defaultInstance; - public static AggregateResponse getDefaultInstance() { - return defaultInstance; - } - - public AggregateResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable; - } - - private int bitField0_; - // repeated bytes firstPart = 1; - public static final int FIRSTPART_FIELD_NUMBER = 1; - private java.util.List firstPart_; - public java.util.List - getFirstPartList() { - return firstPart_; - } - public int getFirstPartCount() { - return firstPart_.size(); - } - public com.google.protobuf.ByteString getFirstPart(int index) { - return firstPart_.get(index); - } - - // optional bytes secondPart = 2; - public static final int SECONDPART_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString secondPart_; - public boolean hasSecondPart() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getSecondPart() { - return secondPart_; - } - - private void initFields() { - firstPart_ = java.util.Collections.emptyList();; - secondPart_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < firstPart_.size(); i++) { - output.writeBytes(1, firstPart_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(2, secondPart_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < firstPart_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(firstPart_.get(i)); - } - size += dataSize; - size += 1 * getFirstPartList().size(); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += 
com.google.protobuf.CodedOutputStream - .computeBytesSize(2, secondPart_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj; - - boolean result = true; - result = result && getFirstPartList() - .equals(other.getFirstPartList()); - result = result && (hasSecondPart() == other.hasSecondPart()); - if (hasSecondPart()) { - result = result && getSecondPart() - .equals(other.getSecondPart()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getFirstPartCount() > 0) { - hash = (37 * hash) + FIRSTPART_FIELD_NUMBER; - hash = (53 * hash) + getFirstPartList().hashCode(); - } - if (hasSecondPart()) { - hash = (37 * hash) + SECONDPART_FIELD_NUMBER; - hash = (53 * hash) + getSecondPart().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - firstPart_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - secondPart_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse 
build() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - firstPart_ = java.util.Collections.unmodifiableList(firstPart_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.firstPart_ = firstPart_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.secondPart_ = secondPart_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this; - if (!other.firstPart_.isEmpty()) { - if (firstPart_.isEmpty()) { - firstPart_ = other.firstPart_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureFirstPartIsMutable(); - firstPart_.addAll(other.firstPart_); - } - onChanged(); - } - if (other.hasSecondPart()) { - setSecondPart(other.getSecondPart()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureFirstPartIsMutable(); - firstPart_.add(input.readBytes()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - secondPart_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes firstPart = 1; - private java.util.List firstPart_ = java.util.Collections.emptyList();; - private void ensureFirstPartIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - firstPart_ = new java.util.ArrayList(firstPart_); - bitField0_ |= 
0x00000001; - } - } - public java.util.List - getFirstPartList() { - return java.util.Collections.unmodifiableList(firstPart_); - } - public int getFirstPartCount() { - return firstPart_.size(); - } - public com.google.protobuf.ByteString getFirstPart(int index) { - return firstPart_.get(index); - } - public Builder setFirstPart( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureFirstPartIsMutable(); - firstPart_.set(index, value); - onChanged(); - return this; - } - public Builder addFirstPart(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureFirstPartIsMutable(); - firstPart_.add(value); - onChanged(); - return this; - } - public Builder addAllFirstPart( - java.lang.Iterable values) { - ensureFirstPartIsMutable(); - super.addAll(values, firstPart_); - onChanged(); - return this; - } - public Builder clearFirstPart() { - firstPart_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // optional bytes secondPart = 2; - private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasSecondPart() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSecondPart() { - return secondPart_; - } - public Builder setSecondPart(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - secondPart_ = value; - onChanged(); - return this; - } - public Builder clearSecondPart() { - bitField0_ = (bitField0_ & ~0x00000002); - secondPart_ = getDefaultInstance().getSecondPart(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:AggregateResponse) - } - - static { - defaultInstance = new AggregateResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AggregateResponse) - } - - public static abstract class AggregateService - implements com.google.protobuf.Service { - protected AggregateService() {} - - public interface Interface { - public abstract void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getMedian( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new AggregateService() { - @java.lang.Override - public void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getMax(controller, request, done); - } - - @java.lang.Override - public void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getMin(controller, request, done); - } - - @java.lang.Override - public void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getSum(controller, request, done); - } - - @java.lang.Override - public void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getRowNum(controller, request, done); - } - - @java.lang.Override - public void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getAvg(controller, request, done); - } - - @java.lang.Override - public void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getStd(controller, request, done); - } - - @java.lang.Override - public void getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - impl.getMedian(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - case 1: - return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - case 2: - return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - case 3: - return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - case 4: - return impl.getAvg(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - case 5: - return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - case 6: - return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - 
com.google.protobuf.RpcCallback done); - - public abstract void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public abstract void getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 5: - this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - 
} - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AggregateArgument_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AggregateArgument_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AggregateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AggregateResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] 
descriptorData = { - "\n\017Aggregate.proto\032\014Client.proto\"h\n\021Aggre" + - "gateArgument\022\034\n\024interpreterClassName\030\001 \002" + - "(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022 \n\030interpreterSp" + - "ecificBytes\030\003 \001(\014\":\n\021AggregateResponse\022\021" + - "\n\tfirstPart\030\001 \003(\014\022\022\n\nsecondPart\030\002 \001(\0142\366\002" + - "\n\020AggregateService\0220\n\006getMax\022\022.Aggregate" + - "Argument\032\022.AggregateResponse\0220\n\006getMin\022\022" + - ".AggregateArgument\032\022.AggregateResponse\0220" + - "\n\006getSum\022\022.AggregateArgument\032\022.Aggregate" + - "Response\0223\n\tgetRowNum\022\022.AggregateArgumen", - "t\032\022.AggregateResponse\0220\n\006getAvg\022\022.Aggreg" + - "ateArgument\032\022.AggregateResponse\0220\n\006getSt" + - "d\022\022.AggregateArgument\032\022.AggregateRespons" + - "e\0223\n\tgetMedian\022\022.AggregateArgument\032\022.Agg" + - "regateResponseBE\n*org.apache.hadoop.hbas" + - "e.protobuf.generatedB\017AggregateProtosH\001\210" + - "\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_AggregateArgument_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_AggregateArgument_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AggregateArgument_descriptor, - new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", }, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class); - internal_static_AggregateResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_AggregateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AggregateResponse_descriptor, - new java.lang.String[] { "FirstPart", "SecondPart", }, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java deleted file mode 100644 index 05a43f6..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java +++ /dev/null @@ -1,24104 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: Client.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class ClientProtos { - private ClientProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ColumnOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // repeated bytes qualifier = 2; - java.util.List getQualifierList(); - int getQualifierCount(); - com.google.protobuf.ByteString getQualifier(int index); - } - public static final class Column extends - com.google.protobuf.GeneratedMessage - implements ColumnOrBuilder { - // Use Column.newBuilder() to construct. - private Column(Builder builder) { - super(builder); - } - private Column(boolean noInit) {} - - private static final Column defaultInstance; - public static Column getDefaultInstance() { - return defaultInstance; - } - - public Column getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // repeated bytes qualifier = 2; - public static final int QUALIFIER_FIELD_NUMBER = 2; - private java.util.List qualifier_; - public java.util.List - getQualifierList() { - return qualifier_; - } - public int getQualifierCount() { - return qualifier_.size(); - } - public com.google.protobuf.ByteString getQualifier(int index) { - return qualifier_.get(index); - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - for (int i = 0; i < qualifier_.size(); i++) { - output.writeBytes(2, qualifier_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - { - int dataSize = 0; - for (int i = 0; i < qualifier_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(qualifier_.get(i)); - } - size += dataSize; - size += 1 * getQualifierList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = 
size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && getQualifierList() - .equals(other.getQualifierList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (getQualifierCount() > 0) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifierList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = 
newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - qualifier_ = java.util.Collections.unmodifiableList(qualifier_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.qualifier_ = qualifier_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (!other.qualifier_.isEmpty()) { - if (qualifier_.isEmpty()) { - qualifier_ = other.qualifier_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureQualifierIsMutable(); - qualifier_.addAll(other.qualifier_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - ensureQualifierIsMutable(); - qualifier_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // repeated bytes qualifier = 2; - private java.util.List qualifier_ = java.util.Collections.emptyList();; - private void ensureQualifierIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - qualifier_ = new 
java.util.ArrayList(qualifier_); - bitField0_ |= 0x00000002; - } - } - public java.util.List - getQualifierList() { - return java.util.Collections.unmodifiableList(qualifier_); - } - public int getQualifierCount() { - return qualifier_.size(); - } - public com.google.protobuf.ByteString getQualifier(int index) { - return qualifier_.get(index); - } - public Builder setQualifier( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierIsMutable(); - qualifier_.set(index, value); - onChanged(); - return this; - } - public Builder addQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierIsMutable(); - qualifier_.add(value); - onChanged(); - return this; - } - public Builder addAllQualifier( - java.lang.Iterable values) { - ensureQualifierIsMutable(); - super.addAll(values, qualifier_); - onChanged(); - return this; - } - public Builder clearQualifier() { - qualifier_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Column) - } - - static { - defaultInstance = new Column(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Column) - } - - public interface GetOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // repeated .Column column = 2; - java.util.List - getColumnList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); - int getColumnCount(); - java.util.List - getColumnOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index); - - // repeated .NameBytesPair attribute = 3; - java.util.List - getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); - int getAttributeCount(); - java.util.List - getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index); - - // optional uint64 lockId = 4; - boolean hasLockId(); - long getLockId(); - - // optional .Filter filter = 5; - boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - - // optional .TimeRange timeRange = 6; - boolean hasTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - - // optional uint32 maxVersions = 7 [default = 1]; - boolean hasMaxVersions(); - int getMaxVersions(); - - // optional bool cacheBlocks = 8 [default = true]; - boolean hasCacheBlocks(); - boolean getCacheBlocks(); - - // optional uint32 storeLimit = 9; - boolean hasStoreLimit(); - int getStoreLimit(); - - // optional uint32 storeOffset = 10; - boolean hasStoreOffset(); - int getStoreOffset(); - } - public static final class Get extends - com.google.protobuf.GeneratedMessage - implements GetOrBuilder { - // Use Get.newBuilder() to construct. 
- private Get(Builder builder) { - super(builder); - } - private Get(boolean noInit) {} - - private static final Get defaultInstance; - public static Get getDefaultInstance() { - return defaultInstance; - } - - public Get getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // repeated .Column column = 2; - public static final int COLUMN_FIELD_NUMBER = 2; - private java.util.List column_; - public java.util.List getColumnList() { - return column_; - } - public java.util.List - getColumnOrBuilderList() { - return column_; - } - public int getColumnCount() { - return column_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { - return column_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - return column_.get(index); - } - - // repeated .NameBytesPair attribute = 3; - public static final int ATTRIBUTE_FIELD_NUMBER = 3; - private java.util.List attribute_; - public java.util.List getAttributeList() { - return attribute_; - } - public java.util.List - getAttributeOrBuilderList() { - return attribute_; - } - public int getAttributeCount() { - return attribute_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { - return attribute_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index) { - return attribute_.get(index); - } - - // optional uint64 lockId = 4; - public static final int LOCKID_FIELD_NUMBER = 4; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getLockId() { - return lockId_; - } - - // optional .Filter filter = 5; - public static final int FILTER_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; - } - - // optional .TimeRange timeRange = 6; - public static final int TIMERANGE_FIELD_NUMBER = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - // optional uint32 maxVersions = 7 [default = 
1]; - public static final int MAXVERSIONS_FIELD_NUMBER = 7; - private int maxVersions_; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public int getMaxVersions() { - return maxVersions_; - } - - // optional bool cacheBlocks = 8 [default = true]; - public static final int CACHEBLOCKS_FIELD_NUMBER = 8; - private boolean cacheBlocks_; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - - // optional uint32 storeLimit = 9; - public static final int STORELIMIT_FIELD_NUMBER = 9; - private int storeLimit_; - public boolean hasStoreLimit() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getStoreLimit() { - return storeLimit_; - } - - // optional uint32 storeOffset = 10; - public static final int STOREOFFSET_FIELD_NUMBER = 10; - private int storeOffset_; - public boolean hasStoreOffset() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public int getStoreOffset() { - return storeOffset_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - column_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - lockId_ = 0L; - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - maxVersions_ = 1; - cacheBlocks_ = true; - storeLimit_ = 0; - storeOffset_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - for (int i = 0; i < column_.size(); i++) { - output.writeMessage(2, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - output.writeMessage(3, attribute_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(4, lockId_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeUInt32(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(8, cacheBlocks_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeUInt32(9, storeLimit_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeUInt32(10, storeOffset_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { 
- size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - for (int i = 0; i < column_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, attribute_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, lockId_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, cacheBlocks_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(9, storeLimit_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(10, storeOffset_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && getColumnList() - .equals(other.getColumnList()); - result = result && getAttributeList() - .equals(other.getAttributeList()); - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && (hasTimeRange() == other.hasTimeRange()); - if (hasTimeRange()) { - result = result && getTimeRange() - .equals(other.getTimeRange()); - } - result = result && (hasMaxVersions() == other.hasMaxVersions()); - if (hasMaxVersions()) { - result = result && (getMaxVersions() - == other.getMaxVersions()); - } - result = result && (hasCacheBlocks() == other.hasCacheBlocks()); - if (hasCacheBlocks()) { - result = result && (getCacheBlocks() - == other.getCacheBlocks()); - } - result = result && (hasStoreLimit() == other.hasStoreLimit()); - if (hasStoreLimit()) { - result = result && (getStoreLimit() - == other.getStoreLimit()); - } - result = result && (hasStoreOffset() == other.hasStoreOffset()); - if (hasStoreOffset()) { - result = result && (getStoreOffset() - == other.getStoreOffset()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - 
public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (getColumnCount() > 0) { - hash = (37 * hash) + COLUMN_FIELD_NUMBER; - hash = (53 * hash) + getColumnList().hashCode(); - } - if (getAttributeCount() > 0) { - hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; - hash = (53 * hash) + getAttributeList().hashCode(); - } - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - if (hasMaxVersions()) { - hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; - hash = (53 * hash) + getMaxVersions(); - } - if (hasCacheBlocks()) { - hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCacheBlocks()); - } - if (hasStoreLimit()) { - hash = (37 * hash) + STORELIMIT_FIELD_NUMBER; - hash = (53 * hash) + getStoreLimit(); - } - if (hasStoreOffset()) { - hash = (37 * hash) + STOREOFFSET_FIELD_NUMBER; - hash = (53 * hash) + getStoreOffset(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnFieldBuilder(); - getAttributeFieldBuilder(); - getFilterFieldBuilder(); - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - columnBuilder_.clear(); - } - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - attributeBuilder_.clear(); - } - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - maxVersions_ = 1; - bitField0_ = (bitField0_ & ~0x00000040); - cacheBlocks_ = true; - bitField0_ = (bitField0_ & ~0x00000080); - storeLimit_ = 0; - bitField0_ = (bitField0_ & ~0x00000100); - storeOffset_ = 0; - bitField0_ = (bitField0_ & ~0x00000200); - 
return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (columnBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - column_ = java.util.Collections.unmodifiableList(column_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.column_ = column_; - } else { - result.column_ = columnBuilder_.build(); - } - if (attributeBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - attribute_ = java.util.Collections.unmodifiableList(attribute_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.attribute_ = attribute_; - } else { - result.attribute_ = attributeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000002; - } - result.lockId_ = lockId_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000004; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000010; - } - result.maxVersions_ = maxVersions_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000020; - } - result.cacheBlocks_ = cacheBlocks_; - if (((from_bitField0_ & 0x00000100) == 0x00000100)) { - to_bitField0_ |= 0x00000040; - } - result.storeLimit_ = storeLimit_; - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000080; - } - result.storeOffset_ = storeOffset_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (columnBuilder_ == null) { - if (!other.column_.isEmpty()) { - if (column_.isEmpty()) { - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureColumnIsMutable(); - column_.addAll(other.column_); - } - onChanged(); - } - } else { - if (!other.column_.isEmpty()) { - if (columnBuilder_.isEmpty()) { - columnBuilder_.dispose(); - columnBuilder_ = null; - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000002); - columnBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnFieldBuilder() : null; - } else { - columnBuilder_.addAllMessages(other.column_); - } - } - } - if (attributeBuilder_ == null) { - if (!other.attribute_.isEmpty()) { - if (attribute_.isEmpty()) { - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureAttributeIsMutable(); - attribute_.addAll(other.attribute_); - } - onChanged(); - } - } else { - if (!other.attribute_.isEmpty()) { - if (attributeBuilder_.isEmpty()) { - attributeBuilder_.dispose(); - attributeBuilder_ = null; - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000004); - attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getAttributeFieldBuilder() : null; - } else { - attributeBuilder_.addAllMessages(other.attribute_); - } - } - } - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - if (other.hasFilter()) { - mergeFilter(other.getFilter()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - if (other.hasMaxVersions()) { - setMaxVersions(other.getMaxVersions()); - } - if (other.hasCacheBlocks()) { - setCacheBlocks(other.getCacheBlocks()); - } - if (other.hasStoreLimit()) { - setStoreLimit(other.getStoreLimit()); - } - if (other.hasStoreOffset()) { - setStoreOffset(other.getStoreOffset()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - 
addColumn(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 32: { - bitField0_ |= 0x00000008; - lockId_ = input.readUInt64(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 56: { - bitField0_ |= 0x00000040; - maxVersions_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - cacheBlocks_ = input.readBool(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - storeLimit_ = input.readUInt32(); - break; - } - case 80: { - bitField0_ |= 0x00000200; - storeOffset_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // repeated .Column column = 2; - private java.util.List column_ = - java.util.Collections.emptyList(); - private void ensureColumnIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - column_ = new java.util.ArrayList(column_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; - - public java.util.List getColumnList() { - if (columnBuilder_ == null) { - return java.util.Collections.unmodifiableList(column_); - } else { - return columnBuilder_.getMessageList(); - } - } - public int getColumnCount() { - if (columnBuilder_ == null) { - return column_.size(); - } else { - return columnBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { - if (columnBuilder_ == null) { - return column_.get(index); - } else { - return columnBuilder_.getMessage(index); - } - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.set(index, value); - onChanged(); 
- } else { - columnBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.set(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(value); - onChanged(); - } else { - columnBuilder_.addMessage(value); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(index, value); - onChanged(); - } else { - columnBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumn( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumn( - java.lang.Iterable values) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - super.addAll(values, column_); - onChanged(); - } else { - columnBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumn() { - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - columnBuilder_.clear(); - } - return this; - } - public Builder removeColumn(int index) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.remove(index); - onChanged(); - } else { - columnBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( - int index) { - return getColumnFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - if (columnBuilder_ == null) { - return column_.get(index); } else { - return columnBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnOrBuilderList() { - if (columnBuilder_ != null) { - return columnBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(column_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { - return getColumnFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( - int index) { - return getColumnFieldBuilder().addBuilder( - index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); - } - public java.util.List - getColumnBuilderList() { - return getColumnFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> - getColumnFieldBuilder() { - if (columnBuilder_ == null) { - columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( - column_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - column_ = null; - } - return columnBuilder_; - } - - // repeated .NameBytesPair attribute = 3; - private java.util.List attribute_ = - java.util.Collections.emptyList(); - private void ensureAttributeIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - attribute_ = new java.util.ArrayList(attribute_); - bitField0_ |= 0x00000004; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - - public java.util.List getAttributeList() { - if (attributeBuilder_ == null) { - return java.util.Collections.unmodifiableList(attribute_); - } else { - return attributeBuilder_.getMessageList(); - } - } - public int getAttributeCount() { - if (attributeBuilder_ == null) { - return attribute_.size(); - } else { - return attributeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); - } else { - return attributeBuilder_.getMessage(index); - } - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.set(index, value); - onChanged(); - } else { - attributeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.set(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(value); - onChanged(); - } else { - attributeBuilder_.addMessage(value); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(index, value); - onChanged(); - } else { - 
attributeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttribute( - java.lang.Iterable values) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - super.addAll(values, attribute_); - onChanged(); - } else { - attributeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttribute() { - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - attributeBuilder_.clear(); - } - return this; - } - public Builder removeAttribute(int index) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.remove(index); - onChanged(); - } else { - attributeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( - int index) { - return getAttributeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); } else { - return attributeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributeOrBuilderList() { - if (attributeBuilder_ != null) { - return attributeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attribute_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { - return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( - int index) { - return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public java.util.List - getAttributeBuilderList() { - return getAttributeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getAttributeFieldBuilder() { - if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - attribute_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - 
isClean()); - attribute_ = null; - } - return attributeBuilder_; - } - - // optional uint64 lockId = 4; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000008; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000008); - lockId_ = 0L; - onChanged(); - return this; - } - - // optional .Filter filter = 5; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filterBuilder_ == null) { - filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - onChanged(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // optional .TimeRange timeRange = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020) && - timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000020; - onChanged(); - return getTimeRangeFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - private int maxVersions_ = 1; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getMaxVersions() { - return maxVersions_; - } - public Builder setMaxVersions(int value) { - bitField0_ |= 0x00000040; - maxVersions_ = value; - onChanged(); - return this; - } - public Builder clearMaxVersions() { - bitField0_ = (bitField0_ & ~0x00000040); - maxVersions_ = 1; - onChanged(); - return this; - } - - // optional bool cacheBlocks = 8 [default = true]; - private boolean cacheBlocks_ = true; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - public Builder setCacheBlocks(boolean value) { - bitField0_ |= 0x00000080; - cacheBlocks_ = value; - onChanged(); - return this; - } - public Builder clearCacheBlocks() { - bitField0_ = (bitField0_ & ~0x00000080); - cacheBlocks_ = true; - onChanged(); - return this; - } - - // optional uint32 storeLimit = 9; - private int storeLimit_ ; - public boolean hasStoreLimit() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public int getStoreLimit() { - return storeLimit_; - } - public Builder setStoreLimit(int value) { - bitField0_ |= 0x00000100; - storeLimit_ = value; - onChanged(); - return this; - } - public Builder clearStoreLimit() { - bitField0_ = (bitField0_ & ~0x00000100); - storeLimit_ = 0; - onChanged(); - return this; - } - - // optional uint32 storeOffset = 10; - private int storeOffset_ ; - public boolean hasStoreOffset() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - public int getStoreOffset() { - return storeOffset_; - } - public Builder setStoreOffset(int value) { - bitField0_ |= 0x00000200; - storeOffset_ = value; - onChanged(); - return this; - } - public Builder clearStoreOffset() { - bitField0_ = (bitField0_ & ~0x00000200); - storeOffset_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Get) - } - - static { - defaultInstance = new Get(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Get) - } - - public interface ResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes keyValueBytes = 1; - java.util.List getKeyValueBytesList(); - int getKeyValueBytesCount(); - com.google.protobuf.ByteString getKeyValueBytes(int index); - } - public static final class Result extends - com.google.protobuf.GeneratedMessage - implements ResultOrBuilder { - // Use Result.newBuilder() to construct. 
- private Result(Builder builder) { - super(builder); - } - private Result(boolean noInit) {} - - private static final Result defaultInstance; - public static Result getDefaultInstance() { - return defaultInstance; - } - - public Result getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; - } - - // repeated bytes keyValueBytes = 1; - public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; - private java.util.List keyValueBytes_; - public java.util.List - getKeyValueBytesList() { - return keyValueBytes_; - } - public int getKeyValueBytesCount() { - return keyValueBytes_.size(); - } - public com.google.protobuf.ByteString getKeyValueBytes(int index) { - return keyValueBytes_.get(index); - } - - private void initFields() { - keyValueBytes_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < keyValueBytes_.size(); i++) { - output.writeBytes(1, keyValueBytes_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < keyValueBytes_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(keyValueBytes_.get(i)); - } - size += dataSize; - size += 1 * getKeyValueBytesList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj; - - boolean result = true; - result = result && getKeyValueBytesList() - .equals(other.getKeyValueBytesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getKeyValueBytesCount() > 0) { - hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; - hash = (53 * hash) + getKeyValueBytesList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - 
return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - keyValueBytes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.keyValueBytes_ = keyValueBytes_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; - if (!other.keyValueBytes_.isEmpty()) { - if (keyValueBytes_.isEmpty()) { - keyValueBytes_ = other.keyValueBytes_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureKeyValueBytesIsMutable(); - keyValueBytes_.addAll(other.keyValueBytes_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureKeyValueBytesIsMutable(); - keyValueBytes_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes keyValueBytes = 1; - private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; - private void ensureKeyValueBytesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getKeyValueBytesList() { - return java.util.Collections.unmodifiableList(keyValueBytes_); - } - public int getKeyValueBytesCount() { - return keyValueBytes_.size(); - } - public com.google.protobuf.ByteString getKeyValueBytes(int index) { - return keyValueBytes_.get(index); - } - public Builder setKeyValueBytes( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueBytesIsMutable(); - keyValueBytes_.set(index, value); - onChanged(); - return this; - } - public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueBytesIsMutable(); - keyValueBytes_.add(value); - onChanged(); - return this; - } - public Builder addAllKeyValueBytes( - java.lang.Iterable values) { - ensureKeyValueBytesIsMutable(); - super.addAll(values, keyValueBytes_); - onChanged(); - return this; - } - public Builder clearKeyValueBytes() { - keyValueBytes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Result) - } - - static { - defaultInstance = new Result(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Result) - } - - public interface GetRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .Get get = 2; - boolean hasGet(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); - - // optional bool closestRowBefore = 3; - boolean hasClosestRowBefore(); - boolean getClosestRowBefore(); - - // optional bool existenceOnly = 4; - boolean hasExistenceOnly(); - boolean getExistenceOnly(); - } - public static final class GetRequest extends - com.google.protobuf.GeneratedMessage - implements GetRequestOrBuilder { - // Use GetRequest.newBuilder() to construct. 
- private GetRequest(Builder builder) { - super(builder); - } - private GetRequest(boolean noInit) {} - - private static final GetRequest defaultInstance; - public static GetRequest getDefaultInstance() { - return defaultInstance; - } - - public GetRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .Get get = 2; - public static final int GET_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { - return get_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - return get_; - } - - // optional bool closestRowBefore = 3; - public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; - private boolean closestRowBefore_; - public boolean hasClosestRowBefore() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getClosestRowBefore() { - return closestRowBefore_; - } - - // optional bool existenceOnly = 4; - public static final int EXISTENCEONLY_FIELD_NUMBER = 4; - private boolean existenceOnly_; - public boolean hasExistenceOnly() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getExistenceOnly() { - return existenceOnly_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - closestRowBefore_ = false; - existenceOnly_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasGet()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getGet().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, get_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBool(3, 
closestRowBefore_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBool(4, existenceOnly_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, get_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, closestRowBefore_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, existenceOnly_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasGet() == other.hasGet()); - if (hasGet()) { - result = result && getGet() - .equals(other.getGet()); - } - result = result && (hasClosestRowBefore() == other.hasClosestRowBefore()); - if (hasClosestRowBefore()) { - result = result && (getClosestRowBefore() - == other.getClosestRowBefore()); - } - result = result && (hasExistenceOnly() == other.hasExistenceOnly()); - if (hasExistenceOnly()) { - result = result && (getExistenceOnly() - == other.getExistenceOnly()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasGet()) { - hash = (37 * hash) + GET_FIELD_NUMBER; - hash = (53 * hash) + getGet().hashCode(); - } - if (hasClosestRowBefore()) { - hash = (37 * hash) + CLOSESTROWBEFORE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getClosestRowBefore()); - } - if (hasExistenceOnly()) { - hash = (37 * hash) + EXISTENCEONLY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getExistenceOnly()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; - } - - // 
Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getGetFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - closestRowBefore_ = false; - bitField0_ = (bitField0_ & ~0x00000004); - existenceOnly_ = false; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (getBuilder_ == null) { - result.get_ = get_; - } else { - result.get_ = getBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.closestRowBefore_ = closestRowBefore_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.existenceOnly_ = existenceOnly_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasGet()) { - mergeGet(other.getGet()); - } - if (other.hasClosestRowBefore()) { - setClosestRowBefore(other.getClosestRowBefore()); - } - if (other.hasExistenceOnly()) { - setExistenceOnly(other.getExistenceOnly()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasGet()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getGet().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); - if (hasGet()) { - subBuilder.mergeFrom(getGet()); - } - input.readMessage(subBuilder, extensionRegistry); - setGet(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - closestRowBefore_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - existenceOnly_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new 
NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .Get get = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { - if (getBuilder_ == null) { - return get_; - } else { - return getBuilder_.getMessage(); - } - } - public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { - if (getBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - get_ = value; - onChanged(); - } else { - getBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return 
this; - } - public Builder setGet( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { - if (getBuilder_ == null) { - get_ = builderForValue.build(); - onChanged(); - } else { - getBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { - if (getBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { - get_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); - } else { - get_ = value; - } - onChanged(); - } else { - getBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearGet() { - if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - onChanged(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getGetFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - if (getBuilder_ != null) { - return getBuilder_.getMessageOrBuilder(); - } else { - return get_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> - getGetFieldBuilder() { - if (getBuilder_ == null) { - getBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( - get_, - getParentForChildren(), - isClean()); - get_ = null; - } - return getBuilder_; - } - - // optional bool closestRowBefore = 3; - private boolean closestRowBefore_ ; - public boolean hasClosestRowBefore() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getClosestRowBefore() { - return closestRowBefore_; - } - public Builder setClosestRowBefore(boolean value) { - bitField0_ |= 0x00000004; - closestRowBefore_ = value; - onChanged(); - return this; - } - public Builder clearClosestRowBefore() { - bitField0_ = (bitField0_ & ~0x00000004); - closestRowBefore_ = false; - onChanged(); - return this; - } - - // optional bool existenceOnly = 4; - private boolean existenceOnly_ ; - public boolean hasExistenceOnly() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getExistenceOnly() { - return existenceOnly_; - } - public Builder setExistenceOnly(boolean value) { - bitField0_ |= 0x00000008; - existenceOnly_ = value; - onChanged(); - return this; - } - public Builder clearExistenceOnly() { - bitField0_ = (bitField0_ & ~0x00000008); - existenceOnly_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetRequest) - } - - static { - defaultInstance = new GetRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetRequest) - } - - public interface GetResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .Result 
result = 1; - boolean hasResult(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); - - // optional bool exists = 2; - boolean hasExists(); - boolean getExists(); - } - public static final class GetResponse extends - com.google.protobuf.GeneratedMessage - implements GetResponseOrBuilder { - // Use GetResponse.newBuilder() to construct. - private GetResponse(Builder builder) { - super(builder); - } - private GetResponse(boolean noInit) {} - - private static final GetResponse defaultInstance; - public static GetResponse getDefaultInstance() { - return defaultInstance; - } - - public GetResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional .Result result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { - return result_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; - } - - // optional bool exists = 2; - public static final int EXISTS_FIELD_NUMBER = 2; - private boolean exists_; - public boolean hasExists() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getExists() { - return exists_; - } - - private void initFields() { - result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - exists_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, exists_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, exists_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if 
(!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj; - - boolean result = true; - result = result && (hasResult() == other.hasResult()); - if (hasResult()) { - result = result && getResult() - .equals(other.getResult()); - } - result = result && (hasExists() == other.hasExists()); - if (hasExists()) { - result = result && (getExists() - == other.getExists()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResult()) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResult().hashCode(); - } - if (hasExists()) { - hash = (37 * hash) + EXISTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getExists()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - exists_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } 
- return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (resultBuilder_ == null) { - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.exists_ = exists_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this; - if (other.hasResult()) { - mergeResult(other.getResult()); - } - if (other.hasExists()) { - setExists(other.getExists()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - if (hasResult()) { - subBuilder.mergeFrom(getResult()); - } - input.readMessage(subBuilder, extensionRegistry); - setResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - exists_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional .Result result = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { - if (resultBuilder_ == null) { - return result_; - } else { - return resultBuilder_.getMessage(); - } - } - public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value 
== null) { - throw new NullPointerException(); - } - result_ = value; - onChanged(); - } else { - resultBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setResult( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - result_ = builderForValue.build(); - onChanged(); - } else { - resultBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { - result_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); - } else { - result_ = value; - } - onChanged(); - } else { - resultBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - onChanged(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getResultFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilder(); - } else { - return result_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( - result_, - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // optional bool exists = 2; - private boolean exists_ ; - public boolean hasExists() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getExists() { - return exists_; - } - public Builder setExists(boolean value) { - bitField0_ |= 0x00000002; - exists_ = value; - onChanged(); - return this; - } - public Builder clearExists() { - bitField0_ = (bitField0_ & ~0x00000002); - exists_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetResponse) - } - - static { - defaultInstance = new GetResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetResponse) - } - - public interface ConditionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // required bytes family = 2; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // required bytes qualifier = 3; - boolean hasQualifier(); - com.google.protobuf.ByteString getQualifier(); - - // required .CompareType 
compareType = 4; - boolean hasCompareType(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType(); - - // required .Comparator comparator = 5; - boolean hasComparator(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); - } - public static final class Condition extends - com.google.protobuf.GeneratedMessage - implements ConditionOrBuilder { - // Use Condition.newBuilder() to construct. - private Condition(Builder builder) { - super(builder); - } - private Condition(boolean noInit) {} - - private static final Condition defaultInstance; - public static Condition getDefaultInstance() { - return defaultInstance; - } - - public Condition getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required bytes family = 2; - public static final int FAMILY_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required bytes qualifier = 3; - public static final int QUALIFIER_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString qualifier_; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - // required .CompareType compareType = 4; - public static final int COMPARETYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_; - public boolean hasCompareType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { - return compareType_; - } - - // required .Comparator comparator = 5; - public static final int COMPARATOR_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { - return comparator_; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - return comparator_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 
- } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasQualifier()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCompareType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasComparator()) { - memoizedIsInitialized = 0; - return false; - } - if (!getComparator().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, family_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, qualifier_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, compareType_.getNumber()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, comparator_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, family_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, qualifier_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, compareType_.getNumber()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, comparator_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && (hasCompareType() == other.hasCompareType()); - if (hasCompareType()) { - result = result && - (getCompareType() == other.getCompareType()); - } - result = result && (hasComparator() == 
other.hasComparator()); - if (hasComparator()) { - result = result && getComparator() - .equals(other.getComparator()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - if (hasCompareType()) { - hash = (37 * hash) + COMPARETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompareType()); - } - if (hasComparator()) { - hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; - hash = (53 * hash) + getComparator().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getComparatorFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - bitField0_ = (bitField0_ & ~0x00000008); - if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } else { - comparatorBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.qualifier_ = qualifier_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.compareType_ = compareType_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - if (comparatorBuilder_ == null) { - result.comparator_ = comparator_; - } else { - result.comparator_ = comparatorBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - if (other.hasCompareType()) { - setCompareType(other.getCompareType()); - } - if (other.hasComparator()) { - mergeComparator(other.getComparator()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasFamily()) { - - return false; - } - if (!hasQualifier()) { - - return false; - } - if (!hasCompareType()) { - - return false; - } - if (!hasComparator()) { - - return false; - } - if (!getComparator().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - family_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - 
qualifier_ = input.readBytes(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - compareType_ = value; - } - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); - if (hasComparator()) { - subBuilder.mergeFrom(getComparator()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparator(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required bytes family = 2; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000002); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // required bytes qualifier = 3; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - qualifier_ = value; - onChanged(); - return this; - } - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000004); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // required .CompareType compareType = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - public boolean hasCompareType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { - return compareType_; - } - public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - compareType_ = value; - onChanged(); - return this; - } - public Builder clearCompareType() { - bitField0_ = (bitField0_ & ~0x00000008); - compareType_ 
= org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - onChanged(); - return this; - } - - // required .Comparator comparator = 5; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { - if (comparatorBuilder_ == null) { - return comparator_; - } else { - return comparatorBuilder_.getMessage(); - } - } - public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { - if (comparatorBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - comparator_ = value; - onChanged(); - } else { - comparatorBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setComparator( - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { - if (comparatorBuilder_ == null) { - comparator_ = builderForValue.build(); - onChanged(); - } else { - comparatorBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { - if (comparatorBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { - comparator_ = - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); - } else { - comparator_ = value; - } - onChanged(); - } else { - comparatorBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearComparator() { - if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - onChanged(); - } else { - comparatorBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getComparatorFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - if (comparatorBuilder_ != null) { - return comparatorBuilder_.getMessageOrBuilder(); - } else { - return comparator_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> - getComparatorFieldBuilder() { - if (comparatorBuilder_ == null) { - comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( - comparator_, - getParentForChildren(), - isClean()); - comparator_ = null; - } - return comparatorBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Condition) - } - - static { - defaultInstance = new Condition(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Condition) - } - - public interface MutateOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // required .Mutate.MutateType mutateType = 2; - boolean hasMutateType(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType(); - - // repeated .Mutate.ColumnValue columnValue = 3; - java.util.List - getColumnValueList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index); - int getColumnValueCount(); - java.util.List - getColumnValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( - int index); - - // repeated .NameBytesPair attribute = 4; - java.util.List - getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); - int getAttributeCount(); - java.util.List - getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index); - - // optional uint64 timestamp = 5; - boolean hasTimestamp(); - long getTimestamp(); - - // optional uint64 lockId = 6; - boolean hasLockId(); - long getLockId(); - - // optional bool writeToWAL = 7 [default = true]; - boolean hasWriteToWAL(); - boolean getWriteToWAL(); - - // optional .TimeRange timeRange = 10; - boolean hasTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - } - public static final class Mutate extends - com.google.protobuf.GeneratedMessage - implements MutateOrBuilder { - // Use Mutate.newBuilder() to construct. 
- private Mutate(Builder builder) { - super(builder); - } - private Mutate(boolean noInit) {} - - private static final Mutate defaultInstance; - public static Mutate getDefaultInstance() { - return defaultInstance; - } - - public Mutate getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; - } - - public enum MutateType - implements com.google.protobuf.ProtocolMessageEnum { - APPEND(0, 0), - INCREMENT(1, 1), - PUT(2, 2), - DELETE(3, 3), - ; - - public static final int APPEND_VALUE = 0; - public static final int INCREMENT_VALUE = 1; - public static final int PUT_VALUE = 2; - public static final int DELETE_VALUE = 3; - - - public final int getNumber() { return value; } - - public static MutateType valueOf(int value) { - switch (value) { - case 0: return APPEND; - case 1: return INCREMENT; - case 2: return PUT; - case 3: return DELETE; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public MutateType findValueByNumber(int number) { - return MutateType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); - } - - private static final MutateType[] VALUES = { - APPEND, INCREMENT, PUT, DELETE, - }; - - public static MutateType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private MutateType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Mutate.MutateType) - } - - public enum DeleteType - implements com.google.protobuf.ProtocolMessageEnum { - DELETE_ONE_VERSION(0, 0), - DELETE_MULTIPLE_VERSIONS(1, 1), - DELETE_FAMILY(2, 2), - ; - - public static final int DELETE_ONE_VERSION_VALUE = 0; - public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; - public static final int DELETE_FAMILY_VALUE = 2; - - - public final int getNumber() { return value; } - - public static DeleteType valueOf(int value) { - switch (value) { - case 0: return DELETE_ONE_VERSION; - case 1: return DELETE_MULTIPLE_VERSIONS; - case 2: return DELETE_FAMILY; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public 
DeleteType findValueByNumber(int number) { - return DeleteType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(1); - } - - private static final DeleteType[] VALUES = { - DELETE_ONE_VERSION, DELETE_MULTIPLE_VERSIONS, DELETE_FAMILY, - }; - - public static DeleteType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private DeleteType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Mutate.DeleteType) - } - - public interface ColumnValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - java.util.List - getQualifierValueList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); - int getQualifierValueCount(); - java.util.List - getQualifierValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( - int index); - } - public static final class ColumnValue extends - com.google.protobuf.GeneratedMessage - implements ColumnValueOrBuilder { - // Use ColumnValue.newBuilder() to construct. - private ColumnValue(Builder builder) { - super(builder); - } - private ColumnValue(boolean noInit) {} - - private static final ColumnValue defaultInstance; - public static ColumnValue getDefaultInstance() { - return defaultInstance; - } - - public ColumnValue getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; - } - - public interface QualifierValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes qualifier = 1; - boolean hasQualifier(); - com.google.protobuf.ByteString getQualifier(); - - // optional bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - - // optional uint64 timestamp = 3; - boolean hasTimestamp(); - long getTimestamp(); - - // optional .Mutate.DeleteType deleteType = 4; - boolean hasDeleteType(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType(); - } - public static final class QualifierValue extends - com.google.protobuf.GeneratedMessage - implements QualifierValueOrBuilder { - // Use QualifierValue.newBuilder() to construct. 
- private QualifierValue(Builder builder) { - super(builder); - } - private QualifierValue(boolean noInit) {} - - private static final QualifierValue defaultInstance; - public static QualifierValue getDefaultInstance() { - return defaultInstance; - } - - public QualifierValue getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes qualifier = 1; - public static final int QUALIFIER_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString qualifier_; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - // optional bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - // optional uint64 timestamp = 3; - public static final int TIMESTAMP_FIELD_NUMBER = 3; - private long timestamp_; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - - // optional .Mutate.DeleteType deleteType = 4; - public static final int DELETETYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_; - public boolean hasDeleteType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { - return deleteType_; - } - - private void initFields() { - qualifier_ = com.google.protobuf.ByteString.EMPTY; - value_ = com.google.protobuf.ByteString.EMPTY; - timestamp_ = 0L; - deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, qualifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, deleteType_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, qualifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += 
com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, deleteType_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) obj; - - boolean result = true; - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && (hasDeleteType() == other.hasDeleteType()); - if (hasDeleteType()) { - result = result && - (getDeleteType() == other.getDeleteType()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - if (hasDeleteType()) { - hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getDeleteType()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue 
parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private 
Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.qualifier_ = qualifier_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.timestamp_ = timestamp_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.deleteType_ = deleteType_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - if (other.hasDeleteType()) { - setDeleteType(other.getDeleteType()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - qualifier_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - deleteType_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // optional bytes qualifier = 1; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - qualifier_ = value; - onChanged(); - return this; - } - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // optional bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // optional uint64 timestamp = 3; - private long timestamp_ ; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000004; - timestamp_ = value; - onChanged(); - return this; - } - public Builder 
clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - onChanged(); - return this; - } - - // optional .Mutate.DeleteType deleteType = 4; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; - public boolean hasDeleteType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { - return deleteType_; - } - public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - deleteType_ = value; - onChanged(); - return this; - } - public Builder clearDeleteType() { - bitField0_ = (bitField0_ & ~0x00000008); - deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue.QualifierValue) - } - - static { - defaultInstance = new QualifierValue(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Mutate.ColumnValue.QualifierValue) - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; - private java.util.List qualifierValue_; - public java.util.List getQualifierValueList() { - return qualifierValue_; - } - public java.util.List - getQualifierValueOrBuilderList() { - return qualifierValue_; - } - public int getQualifierValueCount() { - return qualifierValue_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { - return qualifierValue_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( - int index) { - return qualifierValue_.get(index); - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifierValue_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - for (int i = 0; i < qualifierValue_.size(); i++) { - output.writeMessage(2, qualifierValue_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - for (int i = 0; 
i < qualifierValue_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, qualifierValue_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && getQualifierValueList() - .equals(other.getQualifierValueList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (getQualifierValueCount() > 0) { - hash = (37 * hash) + QUALIFIERVALUE_FIELD_NUMBER; - hash = (53 * hash) + getQualifierValueList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = 
newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getQualifierValueFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (qualifierValueBuilder_ == null) { - qualifierValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - qualifierValueBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (qualifierValueBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.qualifierValue_ = qualifierValue_; - } else { - result.qualifierValue_ = qualifierValueBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (qualifierValueBuilder_ == null) { - if (!other.qualifierValue_.isEmpty()) { - if (qualifierValue_.isEmpty()) { - qualifierValue_ = other.qualifierValue_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureQualifierValueIsMutable(); - qualifierValue_.addAll(other.qualifierValue_); - } - onChanged(); - } - } else { - if (!other.qualifierValue_.isEmpty()) { - if (qualifierValueBuilder_.isEmpty()) { - qualifierValueBuilder_.dispose(); - qualifierValueBuilder_ = null; - qualifierValue_ = other.qualifierValue_; - bitField0_ = (bitField0_ & ~0x00000002); - qualifierValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getQualifierValueFieldBuilder() : null; - } else { - qualifierValueBuilder_.addAllMessages(other.qualifierValue_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addQualifierValue(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - private java.util.List qualifierValue_ = - java.util.Collections.emptyList(); - private void ensureQualifierValueIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - qualifierValue_ = new java.util.ArrayList(qualifierValue_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; - - public java.util.List getQualifierValueList() { - if (qualifierValueBuilder_ == null) { - return java.util.Collections.unmodifiableList(qualifierValue_); - } else { - return qualifierValueBuilder_.getMessageList(); - } - } - public int getQualifierValueCount() { - if (qualifierValueBuilder_ == null) { - return qualifierValue_.size(); - } else { - return qualifierValueBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { - if (qualifierValueBuilder_ == null) { - return qualifierValue_.get(index); - } else { - return qualifierValueBuilder_.getMessage(index); - } - } - public Builder setQualifierValue( - int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { - if (qualifierValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierValueIsMutable(); - qualifierValue_.set(index, value); - onChanged(); - } else { - qualifierValueBuilder_.setMessage(index, value); - } - return this; - } - public Builder setQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.set(index, builderForValue.build()); - onChanged(); - } else { - qualifierValueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { - if (qualifierValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierValueIsMutable(); - qualifierValue_.add(value); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(value); - } - return this; - } - public Builder addQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { - if (qualifierValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierValueIsMutable(); - qualifierValue_.add(index, value); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addQualifierValue( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.add(builderForValue.build()); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.add(index, builderForValue.build()); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllQualifierValue( - java.lang.Iterable values) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - super.addAll(values, qualifierValue_); - onChanged(); - } else { - qualifierValueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearQualifierValue() { - if (qualifierValueBuilder_ == null) { - qualifierValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - qualifierValueBuilder_.clear(); - } - return this; - } - public Builder removeQualifierValue(int index) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.remove(index); - onChanged(); - } else { - qualifierValueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( - int index) { - return getQualifierValueFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( - 
int index) { - if (qualifierValueBuilder_ == null) { - return qualifierValue_.get(index); } else { - return qualifierValueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getQualifierValueOrBuilderList() { - if (qualifierValueBuilder_ != null) { - return qualifierValueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(qualifierValue_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { - return getQualifierValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( - int index) { - return getQualifierValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); - } - public java.util.List - getQualifierValueBuilderList() { - return getQualifierValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> - getQualifierValueFieldBuilder() { - if (qualifierValueBuilder_ == null) { - qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( - qualifierValue_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - qualifierValue_ = null; - } - return qualifierValueBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue) - } - - static { - defaultInstance = new ColumnValue(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Mutate.ColumnValue) - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required .Mutate.MutateType mutateType = 2; - public static final int MUTATETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_; - public boolean hasMutateType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { - return mutateType_; - } - - // repeated .Mutate.ColumnValue columnValue = 3; - public static final int COLUMNVALUE_FIELD_NUMBER = 3; - private java.util.List columnValue_; - public java.util.List getColumnValueList() { - return columnValue_; - } - public java.util.List - getColumnValueOrBuilderList() { - return columnValue_; - } - public int getColumnValueCount() { - return columnValue_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue 
getColumnValue(int index) { - return columnValue_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( - int index) { - return columnValue_.get(index); - } - - // repeated .NameBytesPair attribute = 4; - public static final int ATTRIBUTE_FIELD_NUMBER = 4; - private java.util.List attribute_; - public java.util.List getAttributeList() { - return attribute_; - } - public java.util.List - getAttributeOrBuilderList() { - return attribute_; - } - public int getAttributeCount() { - return attribute_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { - return attribute_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index) { - return attribute_.get(index); - } - - // optional uint64 timestamp = 5; - public static final int TIMESTAMP_FIELD_NUMBER = 5; - private long timestamp_; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - - // optional uint64 lockId = 6; - public static final int LOCKID_FIELD_NUMBER = 6; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getLockId() { - return lockId_; - } - - // optional bool writeToWAL = 7 [default = true]; - public static final int WRITETOWAL_FIELD_NUMBER = 7; - private boolean writeToWAL_; - public boolean hasWriteToWAL() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getWriteToWAL() { - return writeToWAL_; - } - - // optional .TimeRange timeRange = 10; - public static final int TIMERANGE_FIELD_NUMBER = 10; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; - columnValue_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - timestamp_ = 0L; - lockId_ = 0L; - writeToWAL_ = true; - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMutateType()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getColumnValueCount(); i++) { - if (!getColumnValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if 
(((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, mutateType_.getNumber()); - } - for (int i = 0; i < columnValue_.size(); i++) { - output.writeMessage(3, columnValue_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - output.writeMessage(4, attribute_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(5, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt64(6, lockId_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBool(7, writeToWAL_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeMessage(10, timeRange_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, mutateType_.getNumber()); - } - for (int i = 0; i < columnValue_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, columnValue_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, attribute_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(5, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(6, lockId_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(7, writeToWAL_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(10, timeRange_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasMutateType() == other.hasMutateType()); - if (hasMutateType()) { - result = result && - (getMutateType() == other.getMutateType()); - } - result = result && getColumnValueList() - .equals(other.getColumnValueList()); - result = result && getAttributeList() - .equals(other.getAttributeList()); - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && (hasWriteToWAL() == other.hasWriteToWAL()); - if (hasWriteToWAL()) { - result 
= result && (getWriteToWAL() - == other.getWriteToWAL()); - } - result = result && (hasTimeRange() == other.hasTimeRange()); - if (hasTimeRange()) { - result = result && getTimeRange() - .equals(other.getTimeRange()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasMutateType()) { - hash = (37 * hash) + MUTATETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getMutateType()); - } - if (getColumnValueCount() > 0) { - hash = (37 * hash) + COLUMNVALUE_FIELD_NUMBER; - hash = (53 * hash) + getColumnValueList().hashCode(); - } - if (getAttributeCount() > 0) { - hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; - hash = (53 * hash) + getAttributeList().hashCode(); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - if (hasWriteToWAL()) { - hash = (37 * hash) + WRITETOWAL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getWriteToWAL()); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom( - 
java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnValueFieldBuilder(); - getAttributeFieldBuilder(); - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; - bitField0_ = (bitField0_ & ~0x00000002); - if (columnValueBuilder_ == null) { - columnValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - columnValueBuilder_.clear(); - } - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - } else { - attributeBuilder_.clear(); - } - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000010); - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000020); - writeToWAL_ = true; - bitField0_ = (bitField0_ & ~0x00000040); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - - public 
Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.mutateType_ = mutateType_; - if (columnValueBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - columnValue_ = java.util.Collections.unmodifiableList(columnValue_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.columnValue_ = columnValue_; - } else { - result.columnValue_ = columnValueBuilder_.build(); - } - if (attributeBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008)) { - attribute_ = java.util.Collections.unmodifiableList(attribute_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.attribute_ = attribute_; - } else { - result.attribute_ = attributeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000004; - } - result.timestamp_ = timestamp_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - result.lockId_ = lockId_; - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000010; - } - result.writeToWAL_ = writeToWAL_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000020; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasMutateType()) { - 
setMutateType(other.getMutateType()); - } - if (columnValueBuilder_ == null) { - if (!other.columnValue_.isEmpty()) { - if (columnValue_.isEmpty()) { - columnValue_ = other.columnValue_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureColumnValueIsMutable(); - columnValue_.addAll(other.columnValue_); - } - onChanged(); - } - } else { - if (!other.columnValue_.isEmpty()) { - if (columnValueBuilder_.isEmpty()) { - columnValueBuilder_.dispose(); - columnValueBuilder_ = null; - columnValue_ = other.columnValue_; - bitField0_ = (bitField0_ & ~0x00000004); - columnValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnValueFieldBuilder() : null; - } else { - columnValueBuilder_.addAllMessages(other.columnValue_); - } - } - } - if (attributeBuilder_ == null) { - if (!other.attribute_.isEmpty()) { - if (attribute_.isEmpty()) { - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensureAttributeIsMutable(); - attribute_.addAll(other.attribute_); - } - onChanged(); - } - } else { - if (!other.attribute_.isEmpty()) { - if (attributeBuilder_.isEmpty()) { - attributeBuilder_.dispose(); - attributeBuilder_ = null; - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000008); - attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getAttributeFieldBuilder() : null; - } else { - attributeBuilder_.addAllMessages(other.attribute_); - } - } - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - if (other.hasWriteToWAL()) { - setWriteToWAL(other.getWriteToWAL()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasMutateType()) { - - return false; - } - for (int i = 0; i < getColumnValueCount(); i++) { - if (!getColumnValue(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - mutateType_ = value; - } - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder(); - 
input.readMessage(subBuilder, extensionRegistry); - addColumnValue(subBuilder.buildPartial()); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 40: { - bitField0_ |= 0x00000010; - timestamp_ = input.readUInt64(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - lockId_ = input.readUInt64(); - break; - } - case 56: { - bitField0_ |= 0x00000040; - writeToWAL_ = input.readBool(); - break; - } - case 82: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required .Mutate.MutateType mutateType = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; - public boolean hasMutateType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { - return mutateType_; - } - public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - mutateType_ = value; - onChanged(); - return this; - } - public Builder clearMutateType() { - bitField0_ = (bitField0_ & ~0x00000002); - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; - onChanged(); - return this; - } - - // repeated .Mutate.ColumnValue columnValue = 3; - private java.util.List columnValue_ = - java.util.Collections.emptyList(); - private void ensureColumnValueIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - columnValue_ = new java.util.ArrayList(columnValue_); - bitField0_ |= 0x00000004; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; - - public java.util.List getColumnValueList() { - if (columnValueBuilder_ == null) { - return java.util.Collections.unmodifiableList(columnValue_); - } else { - return columnValueBuilder_.getMessageList(); - } - } - public int getColumnValueCount() { - if (columnValueBuilder_ == null) { - return 
columnValue_.size(); - } else { - return columnValueBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index) { - if (columnValueBuilder_ == null) { - return columnValue_.get(index); - } else { - return columnValueBuilder_.getMessage(index); - } - } - public Builder setColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { - if (columnValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnValueIsMutable(); - columnValue_.set(index, value); - onChanged(); - } else { - columnValueBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.set(index, builderForValue.build()); - onChanged(); - } else { - columnValueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { - if (columnValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnValueIsMutable(); - columnValue_.add(value); - onChanged(); - } else { - columnValueBuilder_.addMessage(value); - } - return this; - } - public Builder addColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { - if (columnValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnValueIsMutable(); - columnValue_.add(index, value); - onChanged(); - } else { - columnValueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumnValue( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.add(builderForValue.build()); - onChanged(); - } else { - columnValueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.add(index, builderForValue.build()); - onChanged(); - } else { - columnValueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumnValue( - java.lang.Iterable values) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - super.addAll(values, columnValue_); - onChanged(); - } else { - columnValueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumnValue() { - if (columnValueBuilder_ == null) { - columnValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - columnValueBuilder_.clear(); - } - return this; - } - public Builder removeColumnValue(int index) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.remove(index); - onChanged(); - } else { - columnValueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( - int index) { - return getColumnValueFieldBuilder().getBuilder(index); - } - 
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( - int index) { - if (columnValueBuilder_ == null) { - return columnValue_.get(index); } else { - return columnValueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnValueOrBuilderList() { - if (columnValueBuilder_ != null) { - return columnValueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(columnValue_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { - return getColumnValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( - int index) { - return getColumnValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); - } - public java.util.List - getColumnValueBuilderList() { - return getColumnValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> - getColumnValueFieldBuilder() { - if (columnValueBuilder_ == null) { - columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder>( - columnValue_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - columnValue_ = null; - } - return columnValueBuilder_; - } - - // repeated .NameBytesPair attribute = 4; - private java.util.List attribute_ = - java.util.Collections.emptyList(); - private void ensureAttributeIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - attribute_ = new java.util.ArrayList(attribute_); - bitField0_ |= 0x00000008; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - - public java.util.List getAttributeList() { - if (attributeBuilder_ == null) { - return java.util.Collections.unmodifiableList(attribute_); - } else { - return attributeBuilder_.getMessageList(); - } - } - public int getAttributeCount() { - if (attributeBuilder_ == null) { - return attribute_.size(); - } else { - return attributeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); - } else { - return attributeBuilder_.getMessage(index); - } - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.set(index, value); - onChanged(); - } else { - 
attributeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.set(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(value); - onChanged(); - } else { - attributeBuilder_.addMessage(value); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(index, value); - onChanged(); - } else { - attributeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttribute( - java.lang.Iterable values) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - super.addAll(values, attribute_); - onChanged(); - } else { - attributeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttribute() { - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - attributeBuilder_.clear(); - } - return this; - } - public Builder removeAttribute(int index) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.remove(index); - onChanged(); - } else { - attributeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( - int index) { - return getAttributeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); } else { - return attributeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributeOrBuilderList() { - if (attributeBuilder_ != null) { - return attributeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attribute_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { - return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( - int index) { - return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public java.util.List - getAttributeBuilderList() { - return getAttributeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getAttributeFieldBuilder() { - if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - attribute_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - attribute_ = null; - } - return attributeBuilder_; - } - - // optional uint64 timestamp = 5; - private long timestamp_ ; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000010; - timestamp_ = value; - onChanged(); - return this; - } - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000010); - timestamp_ = 0L; - onChanged(); - return this; - } - - // optional uint64 lockId = 6; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000020; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000020); - lockId_ = 0L; - onChanged(); - return this; - } - - // optional bool writeToWAL = 7 [default = true]; - private boolean writeToWAL_ = true; - public boolean hasWriteToWAL() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public boolean getWriteToWAL() { - return writeToWAL_; - } - public Builder setWriteToWAL(boolean value) { - bitField0_ |= 0x00000040; - writeToWAL_ = value; - onChanged(); - return this; - } - public Builder clearWriteToWAL() { - bitField0_ = (bitField0_ & ~0x00000040); - writeToWAL_ = true; - onChanged(); - return this; - } - - // optional .TimeRange timeRange = 10; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if 
(value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000080; - return this; - } - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000080; - return this; - } - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000080) == 0x00000080) && - timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000080; - return this; - } - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000080; - onChanged(); - return getTimeRangeFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Mutate) - } - - static { - defaultInstance = new Mutate(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Mutate) - } - - public interface MutateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .Mutate mutate = 2; - boolean hasMutate(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder(); - - // optional .Condition condition = 3; - boolean hasCondition(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition(); - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); - } - public static final class MutateRequest extends - com.google.protobuf.GeneratedMessage - implements MutateRequestOrBuilder { - // Use MutateRequest.newBuilder() to construct. - private MutateRequest(Builder builder) { - super(builder); - } - private MutateRequest(boolean noInit) {} - - private static final MutateRequest defaultInstance; - public static MutateRequest getDefaultInstance() { - return defaultInstance; - } - - public MutateRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .Mutate mutate = 2; - public static final int MUTATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_; - public boolean hasMutate() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { - return mutate_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { - return mutate_; - } - - // optional .Condition condition = 3; - public static final int CONDITION_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_; - public boolean hasCondition() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { - return condition_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { - return condition_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMutate()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getMutate().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (hasCondition()) { - if (!getCondition().isInitialized()) { - memoizedIsInitialized = 0; - return false; 
- } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, mutate_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, condition_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, mutate_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, condition_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasMutate() == other.hasMutate()); - if (hasMutate()) { - result = result && getMutate() - .equals(other.getMutate()); - } - result = result && (hasCondition() == other.hasCondition()); - if (hasCondition()) { - result = result && getCondition() - .equals(other.getCondition()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasMutate()) { - hash = (37 * hash) + MUTATE_FIELD_NUMBER; - hash = (53 * hash) + getMutate().hashCode(); - } - if (hasCondition()) { - hash = (37 * hash) + CONDITION_FIELD_NUMBER; - hash = (53 * hash) + getCondition().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder() - private 
Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getMutateFieldBuilder(); - getConditionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - } else { - mutateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - } else { - conditionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (mutateBuilder_ == null) { - result.mutate_ = mutate_; - } else { - result.mutate_ = mutateBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - if (conditionBuilder_ == null) { - result.condition_ = condition_; - } else { - result.condition_ = conditionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasMutate()) { - mergeMutate(other.getMutate()); - } - if (other.hasCondition()) { - mergeCondition(other.getCondition()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasMutate()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getMutate().isInitialized()) { - - return false; - } - if (hasCondition()) { - if (!getCondition().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); - if (hasMutate()) { - subBuilder.mergeFrom(getMutate()); - } - input.readMessage(subBuilder, extensionRegistry); - setMutate(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(); - if (hasCondition()) { - subBuilder.mergeFrom(getCondition()); - } - input.readMessage(subBuilder, extensionRegistry); - setCondition(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return 
regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .Mutate mutate = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutateBuilder_; - public boolean hasMutate() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { - if (mutateBuilder_ == null) { - return mutate_; - } else { - return mutateBuilder_.getMessage(); - } - } - public Builder 
setMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { - if (mutateBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - mutate_ = value; - onChanged(); - } else { - mutateBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setMutate( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { - if (mutateBuilder_ == null) { - mutate_ = builderForValue.build(); - onChanged(); - } else { - mutateBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { - if (mutateBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - mutate_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) { - mutate_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); - } else { - mutate_ = value; - } - onChanged(); - } else { - mutateBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearMutate() { - if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - onChanged(); - } else { - mutateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutateBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getMutateFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { - if (mutateBuilder_ != null) { - return mutateBuilder_.getMessageOrBuilder(); - } else { - return mutate_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> - getMutateFieldBuilder() { - if (mutateBuilder_ == null) { - mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( - mutate_, - getParentForChildren(), - isClean()); - mutate_ = null; - } - return mutateBuilder_; - } - - // optional .Condition condition = 3; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; - public boolean hasCondition() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { - if (conditionBuilder_ == null) { - return condition_; - } else { - return conditionBuilder_.getMessage(); - } - } - public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { - if (conditionBuilder_ == null) { - if (value == null) { - 
throw new NullPointerException(); - } - condition_ = value; - onChanged(); - } else { - conditionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder setCondition( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { - if (conditionBuilder_ == null) { - condition_ = builderForValue.build(); - onChanged(); - } else { - conditionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { - if (conditionBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004) && - condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { - condition_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); - } else { - condition_ = value; - } - onChanged(); - } else { - conditionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder clearCondition() { - if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - onChanged(); - } else { - conditionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { - bitField0_ |= 0x00000004; - onChanged(); - return getConditionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { - if (conditionBuilder_ != null) { - return conditionBuilder_.getMessageOrBuilder(); - } else { - return condition_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> - getConditionFieldBuilder() { - if (conditionBuilder_ == null) { - conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>( - condition_, - getParentForChildren(), - isClean()); - condition_ = null; - } - return conditionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MutateRequest) - } - - static { - defaultInstance = new MutateRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MutateRequest) - } - - public interface MutateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .Result result = 1; - boolean hasResult(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); - - // optional bool processed = 2; - boolean hasProcessed(); - boolean getProcessed(); - } - public static final class MutateResponse extends - com.google.protobuf.GeneratedMessage - implements MutateResponseOrBuilder { - // Use MutateResponse.newBuilder() to construct. 
- private MutateResponse(Builder builder) { - super(builder); - } - private MutateResponse(boolean noInit) {} - - private static final MutateResponse defaultInstance; - public static MutateResponse getDefaultInstance() { - return defaultInstance; - } - - public MutateResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional .Result result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { - return result_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; - } - - // optional bool processed = 2; - public static final int PROCESSED_FIELD_NUMBER = 2; - private boolean processed_; - public boolean hasProcessed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getProcessed() { - return processed_; - } - - private void initFields() { - result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - processed_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, processed_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, processed_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj; - - boolean result = true; - result = result && (hasResult() == other.hasResult()); - if (hasResult()) { - result = result && 
getResult() - .equals(other.getResult()); - } - result = result && (hasProcessed() == other.hasProcessed()); - if (hasProcessed()) { - result = result && (getProcessed() - == other.getProcessed()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResult()) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResult().hashCode(); - } - if (hasProcessed()) { - hash = (37 * hash) + PROCESSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getProcessed()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - processed_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this); - int from_bitField0_ = 
bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (resultBuilder_ == null) { - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.processed_ = processed_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this; - if (other.hasResult()) { - mergeResult(other.getResult()); - } - if (other.hasProcessed()) { - setProcessed(other.getProcessed()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - if (hasResult()) { - subBuilder.mergeFrom(getResult()); - } - input.readMessage(subBuilder, extensionRegistry); - setResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - processed_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional .Result result = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { - if (resultBuilder_ == null) { - return result_; - } else { - return resultBuilder_.getMessage(); - } - } - public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - result_ = value; - onChanged(); - } else { - resultBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setResult( - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - result_ = builderForValue.build(); - onChanged(); - } else { - resultBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { - result_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); - } else { - result_ = value; - } - onChanged(); - } else { - resultBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - onChanged(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getResultFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilder(); - } else { - return result_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( - result_, - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // optional bool processed = 2; - private boolean processed_ ; - public boolean hasProcessed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getProcessed() { - return processed_; - } - public Builder setProcessed(boolean value) { - bitField0_ |= 0x00000002; - processed_ = value; - onChanged(); - return this; - } - public Builder clearProcessed() { - bitField0_ = (bitField0_ & ~0x00000002); - processed_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:MutateResponse) - } - - static { - defaultInstance = new MutateResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MutateResponse) - } - - public interface ScanOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Column column = 1; - java.util.List - getColumnList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); - int getColumnCount(); - java.util.List - getColumnOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index); - - // repeated .NameBytesPair attribute = 2; - java.util.List - getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); - 
int getAttributeCount(); - java.util.List - getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index); - - // optional bytes startRow = 3; - boolean hasStartRow(); - com.google.protobuf.ByteString getStartRow(); - - // optional bytes stopRow = 4; - boolean hasStopRow(); - com.google.protobuf.ByteString getStopRow(); - - // optional .Filter filter = 5; - boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - - // optional .TimeRange timeRange = 6; - boolean hasTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - - // optional uint32 maxVersions = 7 [default = 1]; - boolean hasMaxVersions(); - int getMaxVersions(); - - // optional bool cacheBlocks = 8 [default = true]; - boolean hasCacheBlocks(); - boolean getCacheBlocks(); - - // optional uint32 batchSize = 9; - boolean hasBatchSize(); - int getBatchSize(); - - // optional uint64 maxResultSize = 10; - boolean hasMaxResultSize(); - long getMaxResultSize(); - - // optional uint32 storeLimit = 11; - boolean hasStoreLimit(); - int getStoreLimit(); - - // optional uint32 storeOffset = 12; - boolean hasStoreOffset(); - int getStoreOffset(); - } - public static final class Scan extends - com.google.protobuf.GeneratedMessage - implements ScanOrBuilder { - // Use Scan.newBuilder() to construct. - private Scan(Builder builder) { - super(builder); - } - private Scan(boolean noInit) {} - - private static final Scan defaultInstance; - public static Scan getDefaultInstance() { - return defaultInstance; - } - - public Scan getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; - } - - private int bitField0_; - // repeated .Column column = 1; - public static final int COLUMN_FIELD_NUMBER = 1; - private java.util.List column_; - public java.util.List getColumnList() { - return column_; - } - public java.util.List - getColumnOrBuilderList() { - return column_; - } - public int getColumnCount() { - return column_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { - return column_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - return column_.get(index); - } - - // repeated .NameBytesPair attribute = 2; - public static final int ATTRIBUTE_FIELD_NUMBER = 2; - private java.util.List attribute_; - public java.util.List getAttributeList() { - return attribute_; - } - public java.util.List - getAttributeOrBuilderList() { - return attribute_; - } - public int getAttributeCount() { - return attribute_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { - return attribute_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int 
index) { - return attribute_.get(index); - } - - // optional bytes startRow = 3; - public static final int STARTROW_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString startRow_; - public boolean hasStartRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getStartRow() { - return startRow_; - } - - // optional bytes stopRow = 4; - public static final int STOPROW_FIELD_NUMBER = 4; - private com.google.protobuf.ByteString stopRow_; - public boolean hasStopRow() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getStopRow() { - return stopRow_; - } - - // optional .Filter filter = 5; - public static final int FILTER_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; - } - - // optional .TimeRange timeRange = 6; - public static final int TIMERANGE_FIELD_NUMBER = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - public static final int MAXVERSIONS_FIELD_NUMBER = 7; - private int maxVersions_; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public int getMaxVersions() { - return maxVersions_; - } - - // optional bool cacheBlocks = 8 [default = true]; - public static final int CACHEBLOCKS_FIELD_NUMBER = 8; - private boolean cacheBlocks_; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - - // optional uint32 batchSize = 9; - public static final int BATCHSIZE_FIELD_NUMBER = 9; - private int batchSize_; - public boolean hasBatchSize() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getBatchSize() { - return batchSize_; - } - - // optional uint64 maxResultSize = 10; - public static final int MAXRESULTSIZE_FIELD_NUMBER = 10; - private long maxResultSize_; - public boolean hasMaxResultSize() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public long getMaxResultSize() { - return maxResultSize_; - } - - // optional uint32 storeLimit = 11; - public static final int STORELIMIT_FIELD_NUMBER = 11; - private int storeLimit_; - public boolean hasStoreLimit() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public int getStoreLimit() { - return storeLimit_; - } - - // optional uint32 storeOffset = 12; - public static final int STOREOFFSET_FIELD_NUMBER = 12; - private int storeOffset_; - public boolean hasStoreOffset() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - public int getStoreOffset() { - return storeOffset_; - } - - private void initFields() { - column_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - startRow_ = com.google.protobuf.ByteString.EMPTY; - stopRow_ = 
com.google.protobuf.ByteString.EMPTY; - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - maxVersions_ = 1; - cacheBlocks_ = true; - batchSize_ = 0; - maxResultSize_ = 0L; - storeLimit_ = 0; - storeOffset_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < column_.size(); i++) { - output.writeMessage(1, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - output.writeMessage(2, attribute_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(3, startRow_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(4, stopRow_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeUInt32(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(8, cacheBlocks_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeUInt32(9, batchSize_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeUInt64(10, maxResultSize_); - } - if (((bitField0_ & 0x00000100) == 0x00000100)) { - output.writeUInt32(11, storeLimit_); - } - if (((bitField0_ & 0x00000200) == 0x00000200)) { - output.writeUInt32(12, storeOffset_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < column_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, attribute_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, startRow_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, stopRow_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, cacheBlocks_); - } 
- if (((bitField0_ & 0x00000040) == 0x00000040)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(9, batchSize_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(10, maxResultSize_); - } - if (((bitField0_ & 0x00000100) == 0x00000100)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(11, storeLimit_); - } - if (((bitField0_ & 0x00000200) == 0x00000200)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(12, storeOffset_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj; - - boolean result = true; - result = result && getColumnList() - .equals(other.getColumnList()); - result = result && getAttributeList() - .equals(other.getAttributeList()); - result = result && (hasStartRow() == other.hasStartRow()); - if (hasStartRow()) { - result = result && getStartRow() - .equals(other.getStartRow()); - } - result = result && (hasStopRow() == other.hasStopRow()); - if (hasStopRow()) { - result = result && getStopRow() - .equals(other.getStopRow()); - } - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && (hasTimeRange() == other.hasTimeRange()); - if (hasTimeRange()) { - result = result && getTimeRange() - .equals(other.getTimeRange()); - } - result = result && (hasMaxVersions() == other.hasMaxVersions()); - if (hasMaxVersions()) { - result = result && (getMaxVersions() - == other.getMaxVersions()); - } - result = result && (hasCacheBlocks() == other.hasCacheBlocks()); - if (hasCacheBlocks()) { - result = result && (getCacheBlocks() - == other.getCacheBlocks()); - } - result = result && (hasBatchSize() == other.hasBatchSize()); - if (hasBatchSize()) { - result = result && (getBatchSize() - == other.getBatchSize()); - } - result = result && (hasMaxResultSize() == other.hasMaxResultSize()); - if (hasMaxResultSize()) { - result = result && (getMaxResultSize() - == other.getMaxResultSize()); - } - result = result && (hasStoreLimit() == other.hasStoreLimit()); - if (hasStoreLimit()) { - result = result && (getStoreLimit() - == other.getStoreLimit()); - } - result = result && (hasStoreOffset() == other.hasStoreOffset()); - if (hasStoreOffset()) { - result = result && (getStoreOffset() - == other.getStoreOffset()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getColumnCount() > 0) { - hash = (37 * hash) + COLUMN_FIELD_NUMBER; - hash = (53 * hash) + getColumnList().hashCode(); - } - if (getAttributeCount() > 0) { - hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; - hash = (53 * hash) + getAttributeList().hashCode(); - } - if (hasStartRow()) { - hash = (37 * hash) + 
STARTROW_FIELD_NUMBER; - hash = (53 * hash) + getStartRow().hashCode(); - } - if (hasStopRow()) { - hash = (37 * hash) + STOPROW_FIELD_NUMBER; - hash = (53 * hash) + getStopRow().hashCode(); - } - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - if (hasMaxVersions()) { - hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; - hash = (53 * hash) + getMaxVersions(); - } - if (hasCacheBlocks()) { - hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCacheBlocks()); - } - if (hasBatchSize()) { - hash = (37 * hash) + BATCHSIZE_FIELD_NUMBER; - hash = (53 * hash) + getBatchSize(); - } - if (hasMaxResultSize()) { - hash = (37 * hash) + MAXRESULTSIZE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMaxResultSize()); - } - if (hasStoreLimit()) { - hash = (37 * hash) + STORELIMIT_FIELD_NUMBER; - hash = (53 * hash) + getStoreLimit(); - } - if (hasStoreOffset()) { - hash = (37 * hash) + STOREOFFSET_FIELD_NUMBER; - hash = (53 * hash) + getStoreOffset(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnFieldBuilder(); - getAttributeFieldBuilder(); - getFilterFieldBuilder(); - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - columnBuilder_.clear(); - } - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - attributeBuilder_.clear(); - } - startRow_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - stopRow_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - maxVersions_ = 1; - bitField0_ = (bitField0_ & ~0x00000040); - cacheBlocks_ = true; - bitField0_ = (bitField0_ & ~0x00000080); - batchSize_ = 0; - bitField0_ = (bitField0_ & ~0x00000100); - maxResultSize_ = 0L; - bitField0_ = (bitField0_ & ~0x00000200); - storeLimit_ = 0; - bitField0_ = (bitField0_ & ~0x00000400); - storeOffset_ = 0; - bitField0_ = 
(bitField0_ & ~0x00000800); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (columnBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - column_ = java.util.Collections.unmodifiableList(column_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.column_ = column_; - } else { - result.column_ = columnBuilder_.build(); - } - if (attributeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - attribute_ = java.util.Collections.unmodifiableList(attribute_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.attribute_ = attribute_; - } else { - result.attribute_ = attributeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000001; - } - result.startRow_ = startRow_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000002; - } - result.stopRow_ = stopRow_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000004; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000010; - } - result.maxVersions_ = maxVersions_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000020; - } - result.cacheBlocks_ = cacheBlocks_; - if (((from_bitField0_ & 0x00000100) == 0x00000100)) { - to_bitField0_ |= 0x00000040; - } - result.batchSize_ = batchSize_; - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000080; - } - result.maxResultSize_ = maxResultSize_; - if (((from_bitField0_ & 0x00000400) == 0x00000400)) { - to_bitField0_ |= 0x00000100; - } - result.storeLimit_ = storeLimit_; - if (((from_bitField0_ & 0x00000800) == 0x00000800)) { - to_bitField0_ |= 0x00000200; - } - result.storeOffset_ = storeOffset_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - 
public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this; - if (columnBuilder_ == null) { - if (!other.column_.isEmpty()) { - if (column_.isEmpty()) { - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureColumnIsMutable(); - column_.addAll(other.column_); - } - onChanged(); - } - } else { - if (!other.column_.isEmpty()) { - if (columnBuilder_.isEmpty()) { - columnBuilder_.dispose(); - columnBuilder_ = null; - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000001); - columnBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnFieldBuilder() : null; - } else { - columnBuilder_.addAllMessages(other.column_); - } - } - } - if (attributeBuilder_ == null) { - if (!other.attribute_.isEmpty()) { - if (attribute_.isEmpty()) { - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureAttributeIsMutable(); - attribute_.addAll(other.attribute_); - } - onChanged(); - } - } else { - if (!other.attribute_.isEmpty()) { - if (attributeBuilder_.isEmpty()) { - attributeBuilder_.dispose(); - attributeBuilder_ = null; - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000002); - attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getAttributeFieldBuilder() : null; - } else { - attributeBuilder_.addAllMessages(other.attribute_); - } - } - } - if (other.hasStartRow()) { - setStartRow(other.getStartRow()); - } - if (other.hasStopRow()) { - setStopRow(other.getStopRow()); - } - if (other.hasFilter()) { - mergeFilter(other.getFilter()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - if (other.hasMaxVersions()) { - setMaxVersions(other.getMaxVersions()); - } - if (other.hasCacheBlocks()) { - setCacheBlocks(other.getCacheBlocks()); - } - if (other.hasBatchSize()) { - setBatchSize(other.getBatchSize()); - } - if (other.hasMaxResultSize()) { - setMaxResultSize(other.getMaxResultSize()); - } - if (other.hasStoreLimit()) { - setStoreLimit(other.getStoreLimit()); - } - if (other.hasStoreOffset()) { - setStoreOffset(other.getStoreOffset()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, 
unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumn(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 26: { - bitField0_ |= 0x00000004; - startRow_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - stopRow_ = input.readBytes(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 56: { - bitField0_ |= 0x00000040; - maxVersions_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - cacheBlocks_ = input.readBool(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - batchSize_ = input.readUInt32(); - break; - } - case 80: { - bitField0_ |= 0x00000200; - maxResultSize_ = input.readUInt64(); - break; - } - case 88: { - bitField0_ |= 0x00000400; - storeLimit_ = input.readUInt32(); - break; - } - case 96: { - bitField0_ |= 0x00000800; - storeOffset_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Column column = 1; - private java.util.List column_ = - java.util.Collections.emptyList(); - private void ensureColumnIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - column_ = new java.util.ArrayList(column_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; - - public java.util.List getColumnList() { - if (columnBuilder_ == null) { - return java.util.Collections.unmodifiableList(column_); - } else { - return columnBuilder_.getMessageList(); - } - } - public int getColumnCount() { - if (columnBuilder_ == null) { - return column_.size(); - } else { - return columnBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { - if (columnBuilder_ == null) { - return column_.get(index); - } else { - return columnBuilder_.getMessage(index); - } - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.set(index, value); 
- onChanged(); - } else { - columnBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.set(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(value); - onChanged(); - } else { - columnBuilder_.addMessage(value); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(index, value); - onChanged(); - } else { - columnBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumn( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumn( - java.lang.Iterable values) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - super.addAll(values, column_); - onChanged(); - } else { - columnBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumn() { - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - columnBuilder_.clear(); - } - return this; - } - public Builder removeColumn(int index) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.remove(index); - onChanged(); - } else { - columnBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( - int index) { - return getColumnFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - if (columnBuilder_ == null) { - return column_.get(index); } else { - return columnBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnOrBuilderList() { - if (columnBuilder_ != null) { - return columnBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(column_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { - return getColumnFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( - int index) { - return getColumnFieldBuilder().addBuilder( - index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); - } - public java.util.List - getColumnBuilderList() { - return getColumnFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> - getColumnFieldBuilder() { - if (columnBuilder_ == null) { - columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( - column_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - column_ = null; - } - return columnBuilder_; - } - - // repeated .NameBytesPair attribute = 2; - private java.util.List attribute_ = - java.util.Collections.emptyList(); - private void ensureAttributeIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - attribute_ = new java.util.ArrayList(attribute_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - - public java.util.List getAttributeList() { - if (attributeBuilder_ == null) { - return java.util.Collections.unmodifiableList(attribute_); - } else { - return attributeBuilder_.getMessageList(); - } - } - public int getAttributeCount() { - if (attributeBuilder_ == null) { - return attribute_.size(); - } else { - return attributeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); - } else { - return attributeBuilder_.getMessage(index); - } - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.set(index, value); - onChanged(); - } else { - attributeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.set(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(value); - onChanged(); - } else { - attributeBuilder_.addMessage(value); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(index, value); - onChanged(); - } else { - 
attributeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttribute( - java.lang.Iterable values) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - super.addAll(values, attribute_); - onChanged(); - } else { - attributeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttribute() { - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - attributeBuilder_.clear(); - } - return this; - } - public Builder removeAttribute(int index) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.remove(index); - onChanged(); - } else { - attributeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( - int index) { - return getAttributeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( - int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); } else { - return attributeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributeOrBuilderList() { - if (attributeBuilder_ != null) { - return attributeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attribute_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { - return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( - int index) { - return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public java.util.List - getAttributeBuilderList() { - return getAttributeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getAttributeFieldBuilder() { - if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - attribute_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - 
isClean()); - attribute_ = null; - } - return attributeBuilder_; - } - - // optional bytes startRow = 3; - private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasStartRow() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getStartRow() { - return startRow_; - } - public Builder setStartRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - startRow_ = value; - onChanged(); - return this; - } - public Builder clearStartRow() { - bitField0_ = (bitField0_ & ~0x00000004); - startRow_ = getDefaultInstance().getStartRow(); - onChanged(); - return this; - } - - // optional bytes stopRow = 4; - private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasStopRow() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getStopRow() { - return stopRow_; - } - public Builder setStopRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - stopRow_ = value; - onChanged(); - return this; - } - public Builder clearStopRow() { - bitField0_ = (bitField0_ & ~0x00000008); - stopRow_ = getDefaultInstance().getStopRow(); - onChanged(); - return this; - } - - // optional .Filter filter = 5; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filterBuilder_ == null) { - filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - onChanged(); - } else { - 
filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // optional .TimeRange timeRange = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020) && - timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000020; - onChanged(); - return getTimeRangeFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - private int maxVersions_ = 1; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getMaxVersions() { - return maxVersions_; - } - public Builder setMaxVersions(int value) { - bitField0_ |= 0x00000040; - maxVersions_ = value; - onChanged(); - return this; - } - public Builder clearMaxVersions() { - bitField0_ = (bitField0_ & ~0x00000040); - maxVersions_ = 1; - onChanged(); - return this; - } - - // optional bool cacheBlocks = 8 [default = true]; - private boolean cacheBlocks_ = true; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - public Builder setCacheBlocks(boolean value) { - bitField0_ |= 0x00000080; - cacheBlocks_ = value; - onChanged(); - return this; - } - public Builder clearCacheBlocks() { - bitField0_ = (bitField0_ & ~0x00000080); - cacheBlocks_ = true; - onChanged(); - return this; - } - - // optional uint32 batchSize = 9; - private int batchSize_ ; - public boolean hasBatchSize() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public int getBatchSize() { - return batchSize_; - } - public Builder setBatchSize(int value) { - bitField0_ |= 0x00000100; - batchSize_ = value; - onChanged(); - return this; - } - public Builder clearBatchSize() { - bitField0_ = (bitField0_ & ~0x00000100); - batchSize_ = 0; - onChanged(); - return this; - } - - // optional uint64 maxResultSize = 10; - private long maxResultSize_ ; - public boolean hasMaxResultSize() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - public long getMaxResultSize() { - return maxResultSize_; - } - public Builder setMaxResultSize(long value) { - bitField0_ |= 0x00000200; - maxResultSize_ = value; - onChanged(); - return this; - } - public Builder clearMaxResultSize() { - bitField0_ = (bitField0_ & ~0x00000200); - maxResultSize_ = 0L; - onChanged(); - return this; - } - - // optional uint32 storeLimit = 11; - private int storeLimit_ ; - public boolean hasStoreLimit() { - return ((bitField0_ & 0x00000400) == 0x00000400); - } - public int getStoreLimit() { - return storeLimit_; - } - public Builder setStoreLimit(int value) { - bitField0_ |= 0x00000400; - storeLimit_ = value; - onChanged(); - return this; - } - public Builder clearStoreLimit() { - 
bitField0_ = (bitField0_ & ~0x00000400); - storeLimit_ = 0; - onChanged(); - return this; - } - - // optional uint32 storeOffset = 12; - private int storeOffset_ ; - public boolean hasStoreOffset() { - return ((bitField0_ & 0x00000800) == 0x00000800); - } - public int getStoreOffset() { - return storeOffset_; - } - public Builder setStoreOffset(int value) { - bitField0_ |= 0x00000800; - storeOffset_ = value; - onChanged(); - return this; - } - public Builder clearStoreOffset() { - bitField0_ = (bitField0_ & ~0x00000800); - storeOffset_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Scan) - } - - static { - defaultInstance = new Scan(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Scan) - } - - public interface ScanRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional .Scan scan = 2; - boolean hasScan(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - - // optional uint64 scannerId = 3; - boolean hasScannerId(); - long getScannerId(); - - // optional uint32 numberOfRows = 4; - boolean hasNumberOfRows(); - int getNumberOfRows(); - - // optional bool closeScanner = 5; - boolean hasCloseScanner(); - boolean getCloseScanner(); - - // optional uint64 nextCallSeq = 6; - boolean hasNextCallSeq(); - long getNextCallSeq(); - } - public static final class ScanRequest extends - com.google.protobuf.GeneratedMessage - implements ScanRequestOrBuilder { - // Use ScanRequest.newBuilder() to construct. 
- private ScanRequest(Builder builder) { - super(builder); - } - private ScanRequest(boolean noInit) {} - - private static final ScanRequest defaultInstance; - public static ScanRequest getDefaultInstance() { - return defaultInstance; - } - - public ScanRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; - } - - private int bitField0_; - // optional .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional .Scan scan = 2; - public static final int SCAN_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - return scan_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // optional uint64 scannerId = 3; - public static final int SCANNERID_FIELD_NUMBER = 3; - private long scannerId_; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getScannerId() { - return scannerId_; - } - - // optional uint32 numberOfRows = 4; - public static final int NUMBEROFROWS_FIELD_NUMBER = 4; - private int numberOfRows_; - public boolean hasNumberOfRows() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getNumberOfRows() { - return numberOfRows_; - } - - // optional bool closeScanner = 5; - public static final int CLOSESCANNER_FIELD_NUMBER = 5; - private boolean closeScanner_; - public boolean hasCloseScanner() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getCloseScanner() { - return closeScanner_; - } - - // optional uint64 nextCallSeq = 6; - public static final int NEXTCALLSEQ_FIELD_NUMBER = 6; - private long nextCallSeq_; - public boolean hasNextCallSeq() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public long getNextCallSeq() { - return nextCallSeq_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - scannerId_ = 0L; - numberOfRows_ = 0; - closeScanner_ = false; - nextCallSeq_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasRegion()) { - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasScan()) { - if (!getScan().isInitialized()) { - 
memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, scannerId_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt32(4, numberOfRows_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBool(5, closeScanner_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeUInt64(6, nextCallSeq_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, scannerId_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, numberOfRows_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(5, closeScanner_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(6, nextCallSeq_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasScan() == other.hasScan()); - if (hasScan()) { - result = result && getScan() - .equals(other.getScan()); - } - result = result && (hasScannerId() == other.hasScannerId()); - if (hasScannerId()) { - result = result && (getScannerId() - == other.getScannerId()); - } - result = result && (hasNumberOfRows() == other.hasNumberOfRows()); - if (hasNumberOfRows()) { - result = result && (getNumberOfRows() - == other.getNumberOfRows()); - } - result = result && (hasCloseScanner() == other.hasCloseScanner()); - if (hasCloseScanner()) { - result = result && (getCloseScanner() - == other.getCloseScanner()); - } - result = result && (hasNextCallSeq() == other.hasNextCallSeq()); - if (hasNextCallSeq()) { - result = result && (getNextCallSeq() - == other.getNextCallSeq()); - } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasScan()) { - hash = (37 * hash) + SCAN_FIELD_NUMBER; - hash = (53 * hash) + getScan().hashCode(); - } - if (hasScannerId()) { - hash = (37 * hash) + SCANNERID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); - } - if (hasNumberOfRows()) { - hash = (37 * hash) + NUMBEROFROWS_FIELD_NUMBER; - hash = (53 * hash) + getNumberOfRows(); - } - if (hasCloseScanner()) { - hash = (37 * hash) + CLOSESCANNER_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCloseScanner()); - } - if (hasNextCallSeq()) { - hash = (37 * hash) + NEXTCALLSEQ_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNextCallSeq()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getScanFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - scannerId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - numberOfRows_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - closeScanner_ = false; - bitField0_ = (bitField0_ & ~0x00000010); - nextCallSeq_ = 0L; - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (scanBuilder_ == null) { - result.scan_ = scan_; - } else { - result.scan_ = scanBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.scannerId_ = scannerId_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.numberOfRows_ = numberOfRows_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.closeScanner_ = closeScanner_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.nextCallSeq_ = nextCallSeq_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasScan()) { - mergeScan(other.getScan()); - } - if (other.hasScannerId()) { - setScannerId(other.getScannerId()); - } - if (other.hasNumberOfRows()) { - setNumberOfRows(other.getNumberOfRows()); - } - if (other.hasCloseScanner()) { - setCloseScanner(other.getCloseScanner()); - } - if (other.hasNextCallSeq()) { - setNextCallSeq(other.getNextCallSeq()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasRegion()) { - if (!getRegion().isInitialized()) { - - return false; - } - } - if (hasScan()) { - if (!getScan().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); - if (hasScan()) { - subBuilder.mergeFrom(getScan()); - } - input.readMessage(subBuilder, extensionRegistry); - setScan(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - scannerId_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - numberOfRows_ = input.readUInt32(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - closeScanner_ = input.readBool(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - nextCallSeq_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // optional .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional .Scan scan = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - if (scanBuilder_ == null) { - return scan_; - } else { - return scanBuilder_.getMessage(); - } - } - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - scan_ = value; - onChanged(); - } else { - scanBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { - if (scanBuilder_ == null) { - scan_ = builderForValue.build(); - onChanged(); - } else { - scanBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { - scan_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); - } else { - scan_ = value; - } - onChanged(); - } else { - scanBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearScan() { - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - onChanged(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getScanFieldBuilder().getBuilder(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - if (scanBuilder_ != null) { - return scanBuilder_.getMessageOrBuilder(); - } else { - return scan_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> - getScanFieldBuilder() { - if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( - scan_, - getParentForChildren(), - isClean()); - scan_ = null; - } - return scanBuilder_; - } - - // optional uint64 scannerId = 3; - private long scannerId_ ; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getScannerId() { - return scannerId_; - } - public Builder setScannerId(long value) { - bitField0_ |= 0x00000004; - scannerId_ = value; - onChanged(); - return this; - } - public Builder clearScannerId() { - bitField0_ = (bitField0_ & ~0x00000004); - scannerId_ = 0L; - onChanged(); - return this; - } - - // optional uint32 numberOfRows = 4; - private int numberOfRows_ ; - public boolean hasNumberOfRows() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getNumberOfRows() { - return numberOfRows_; - } - public Builder setNumberOfRows(int value) { - bitField0_ |= 0x00000008; - numberOfRows_ = value; - onChanged(); - return this; - } - public Builder clearNumberOfRows() { - bitField0_ = (bitField0_ & ~0x00000008); - numberOfRows_ = 0; - onChanged(); - return this; - } - - // optional bool closeScanner = 5; - private boolean closeScanner_ ; - public boolean hasCloseScanner() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getCloseScanner() { - return closeScanner_; - } - public Builder setCloseScanner(boolean value) { - bitField0_ |= 0x00000010; - closeScanner_ = value; - onChanged(); - return this; - } - public Builder clearCloseScanner() { - bitField0_ = (bitField0_ & ~0x00000010); - closeScanner_ = false; - onChanged(); - return this; - } - - // optional uint64 nextCallSeq = 6; - private long nextCallSeq_ ; - public boolean hasNextCallSeq() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public long getNextCallSeq() { - return nextCallSeq_; - } - public Builder setNextCallSeq(long value) { - bitField0_ |= 0x00000020; - nextCallSeq_ = value; - onChanged(); - return this; - } - public Builder clearNextCallSeq() { - bitField0_ = (bitField0_ & ~0x00000020); - nextCallSeq_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ScanRequest) - } - - static { - defaultInstance = new ScanRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ScanRequest) - } - - public interface ScanResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Result result = 1; - java.util.List - getResultList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index); - int getResultCount(); - java.util.List - getResultOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( - int index); - - // optional uint64 scannerId = 2; - boolean hasScannerId(); - 
long getScannerId(); - - // optional bool moreResults = 3; - boolean hasMoreResults(); - boolean getMoreResults(); - - // optional uint32 ttl = 4; - boolean hasTtl(); - int getTtl(); - } - public static final class ScanResponse extends - com.google.protobuf.GeneratedMessage - implements ScanResponseOrBuilder { - // Use ScanResponse.newBuilder() to construct. - private ScanResponse(Builder builder) { - super(builder); - } - private ScanResponse(boolean noInit) {} - - private static final ScanResponse defaultInstance; - public static ScanResponse getDefaultInstance() { - return defaultInstance; - } - - public ScanResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; - } - - private int bitField0_; - // repeated .Result result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private java.util.List result_; - public java.util.List getResultList() { - return result_; - } - public java.util.List - getResultOrBuilderList() { - return result_; - } - public int getResultCount() { - return result_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { - return result_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( - int index) { - return result_.get(index); - } - - // optional uint64 scannerId = 2; - public static final int SCANNERID_FIELD_NUMBER = 2; - private long scannerId_; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getScannerId() { - return scannerId_; - } - - // optional bool moreResults = 3; - public static final int MORERESULTS_FIELD_NUMBER = 3; - private boolean moreResults_; - public boolean hasMoreResults() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMoreResults() { - return moreResults_; - } - - // optional uint32 ttl = 4; - public static final int TTL_FIELD_NUMBER = 4; - private int ttl_; - public boolean hasTtl() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getTtl() { - return ttl_; - } - - private void initFields() { - result_ = java.util.Collections.emptyList(); - scannerId_ = 0L; - moreResults_ = false; - ttl_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < result_.size(); i++) { - output.writeMessage(1, result_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(2, scannerId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(3, moreResults_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt32(4, ttl_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - 
- size = 0; - for (int i = 0; i < result_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, scannerId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, moreResults_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, ttl_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj; - - boolean result = true; - result = result && getResultList() - .equals(other.getResultList()); - result = result && (hasScannerId() == other.hasScannerId()); - if (hasScannerId()) { - result = result && (getScannerId() - == other.getScannerId()); - } - result = result && (hasMoreResults() == other.hasMoreResults()); - if (hasMoreResults()) { - result = result && (getMoreResults() - == other.getMoreResults()); - } - result = result && (hasTtl() == other.hasTtl()); - if (hasTtl()) { - result = result && (getTtl() - == other.getTtl()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResultCount() > 0) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResultList().hashCode(); - } - if (hasScannerId()) { - hash = (37 * hash) + SCANNERID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); - } - if (hasMoreResults()) { - hash = (37 * hash) + MORERESULTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMoreResults()); - } - if (hasTtl()) { - hash = (37 * hash) + TTL_FIELD_NUMBER; - hash = (53 * hash) + getTtl(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder 
create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - resultBuilder_.clear(); - } - scannerId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - moreResults_ = false; - bitField0_ = (bitField0_ & ~0x00000004); - ttl_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = java.util.Collections.unmodifiableList(result_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.scannerId_ = scannerId_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.moreResults_ = moreResults_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000004; - } - result.ttl_ = ttl_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this; - if (resultBuilder_ == null) { - if (!other.result_.isEmpty()) { - if (result_.isEmpty()) { - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureResultIsMutable(); - result_.addAll(other.result_); - } - onChanged(); - } - } else { - if (!other.result_.isEmpty()) { - if (resultBuilder_.isEmpty()) { - 
resultBuilder_.dispose(); - resultBuilder_ = null; - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000001); - resultBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getResultFieldBuilder() : null; - } else { - resultBuilder_.addAllMessages(other.result_); - } - } - } - if (other.hasScannerId()) { - setScannerId(other.getScannerId()); - } - if (other.hasMoreResults()) { - setMoreResults(other.getMoreResults()); - } - if (other.hasTtl()) { - setTtl(other.getTtl()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - scannerId_ = input.readUInt64(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - moreResults_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - ttl_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Result result = 1; - private java.util.List result_ = - java.util.Collections.emptyList(); - private void ensureResultIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new java.util.ArrayList(result_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; - - public java.util.List getResultList() { - if (resultBuilder_ == null) { - return java.util.Collections.unmodifiableList(result_); - } else { - return resultBuilder_.getMessageList(); - } - } - public int getResultCount() { - if (resultBuilder_ == null) { - return result_.size(); - } else { - return resultBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { - if (resultBuilder_ == null) { - return result_.get(index); - } else { - return resultBuilder_.getMessage(index); - } - } - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.set(index, value); - onChanged(); - } else { - resultBuilder_.setMessage(index, value); - } - return this; - } - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - 
result_.set(index, builderForValue.build()); - onChanged(); - } else { - resultBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - } else { - resultBuilder_.addMessage(value); - } - return this; - } - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(index, value); - onChanged(); - } else { - resultBuilder_.addMessage(index, value); - } - return this; - } - public Builder addResult( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(builderForValue.build()); - onChanged(); - } else { - resultBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(index, builderForValue.build()); - onChanged(); - } else { - resultBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllResult( - java.lang.Iterable values) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - super.addAll(values, result_); - onChanged(); - } else { - resultBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - resultBuilder_.clear(); - } - return this; - } - public Builder removeResult(int index) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.remove(index); - onChanged(); - } else { - resultBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder( - int index) { - return getResultFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( - int index) { - if (resultBuilder_ == null) { - return result_.get(index); } else { - return resultBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getResultOrBuilderList() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(result_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder() { - return getResultFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder( - int index) { - return getResultFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); - } - public java.util.List - getResultBuilderList() { - return getResultFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( - result_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // optional uint64 scannerId = 2; - private long scannerId_ ; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getScannerId() { - return scannerId_; - } - public Builder setScannerId(long value) { - bitField0_ |= 0x00000002; - scannerId_ = value; - onChanged(); - return this; - } - public Builder clearScannerId() { - bitField0_ = (bitField0_ & ~0x00000002); - scannerId_ = 0L; - onChanged(); - return this; - } - - // optional bool moreResults = 3; - private boolean moreResults_ ; - public boolean hasMoreResults() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getMoreResults() { - return moreResults_; - } - public Builder setMoreResults(boolean value) { - bitField0_ |= 0x00000004; - moreResults_ = value; - onChanged(); - return this; - } - public Builder clearMoreResults() { - bitField0_ = (bitField0_ & ~0x00000004); - moreResults_ = false; - onChanged(); - return this; - } - - // optional uint32 ttl = 4; - private int ttl_ ; - public boolean hasTtl() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getTtl() { - return ttl_; - } - public Builder setTtl(int value) { - bitField0_ |= 0x00000008; - ttl_ = value; - onChanged(); - return this; - } - public Builder clearTtl() { - bitField0_ = (bitField0_ & ~0x00000008); - ttl_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ScanResponse) - } - - static { - defaultInstance = new ScanResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ScanResponse) - } - - public interface LockRowRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated bytes row = 2; - java.util.List getRowList(); - int getRowCount(); - com.google.protobuf.ByteString getRow(int index); - } - public static final class LockRowRequest extends - com.google.protobuf.GeneratedMessage - implements LockRowRequestOrBuilder { - // Use LockRowRequest.newBuilder() to construct. 
- private LockRowRequest(Builder builder) { - super(builder); - } - private LockRowRequest(boolean noInit) {} - - private static final LockRowRequest defaultInstance; - public static LockRowRequest getDefaultInstance() { - return defaultInstance; - } - - public LockRowRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated bytes row = 2; - public static final int ROW_FIELD_NUMBER = 2; - private java.util.List row_; - public java.util.List - getRowList() { - return row_; - } - public int getRowCount() { - return row_.size(); - } - public com.google.protobuf.ByteString getRow(int index) { - return row_.get(index); - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - row_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < row_.size(); i++) { - output.writeBytes(2, row_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - { - int dataSize = 0; - for (int i = 0; i < row_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(row_.get(i)); - } - size += dataSize; - size += 1 * getRowList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getRowList() - .equals(other.getRowList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getRowCount() > 0) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRowList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - row_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - row_ = java.util.Collections.unmodifiableList(row_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.row_ = row_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (!other.row_.isEmpty()) { - if (row_.isEmpty()) { - row_ = other.row_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureRowIsMutable(); - row_.addAll(other.row_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - ensureRowIsMutable(); - row_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // repeated bytes row = 2; - private java.util.List row_ = java.util.Collections.emptyList();; - private void ensureRowIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - row_ = new java.util.ArrayList(row_); - bitField0_ |= 0x00000002; - } - } - public java.util.List - getRowList() { - return java.util.Collections.unmodifiableList(row_); - } - public int getRowCount() { - return row_.size(); - } - public com.google.protobuf.ByteString getRow(int index) { - return row_.get(index); - } - public Builder setRow( - 
int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRowIsMutable(); - row_.set(index, value); - onChanged(); - return this; - } - public Builder addRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRowIsMutable(); - row_.add(value); - onChanged(); - return this; - } - public Builder addAllRow( - java.lang.Iterable values) { - ensureRowIsMutable(); - super.addAll(values, row_); - onChanged(); - return this; - } - public Builder clearRow() { - row_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:LockRowRequest) - } - - static { - defaultInstance = new LockRowRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:LockRowRequest) - } - - public interface LockRowResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 lockId = 1; - boolean hasLockId(); - long getLockId(); - - // optional uint32 ttl = 2; - boolean hasTtl(); - int getTtl(); - } - public static final class LockRowResponse extends - com.google.protobuf.GeneratedMessage - implements LockRowResponseOrBuilder { - // Use LockRowResponse.newBuilder() to construct. - private LockRowResponse(Builder builder) { - super(builder); - } - private LockRowResponse(boolean noInit) {} - - private static final LockRowResponse defaultInstance; - public static LockRowResponse getDefaultInstance() { - return defaultInstance; - } - - public LockRowResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 lockId = 1; - public static final int LOCKID_FIELD_NUMBER = 1; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLockId() { - return lockId_; - } - - // optional uint32 ttl = 2; - public static final int TTL_FIELD_NUMBER = 2; - private int ttl_; - public boolean hasTtl() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTtl() { - return ttl_; - } - - private void initFields() { - lockId_ = 0L; - ttl_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLockId()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, lockId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, ttl_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size 
+= com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, lockId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, ttl_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) obj; - - boolean result = true; - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && (hasTtl() == other.hasTtl()); - if (hasTtl()) { - result = result && (getTtl() - == other.getTtl()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - if (hasTtl()) { - hash = (37 * hash) + TTL_FIELD_NUMBER; - hash = (53 * hash) + getTtl(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - 
Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - ttl_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = 
buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lockId_ = lockId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.ttl_ = ttl_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()) return this; - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - if (other.hasTtl()) { - setTtl(other.getTtl()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLockId()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lockId_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - ttl_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 lockId = 1; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000001; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000001); - lockId_ = 0L; - onChanged(); - return this; - } - - // optional uint32 ttl = 2; - private int ttl_ ; - public boolean hasTtl() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTtl() { - return ttl_; - } - public Builder setTtl(int value) { - bitField0_ |= 0x00000002; - 
ttl_ = value; - onChanged(); - return this; - } - public Builder clearTtl() { - bitField0_ = (bitField0_ & ~0x00000002); - ttl_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:LockRowResponse) - } - - static { - defaultInstance = new LockRowResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:LockRowResponse) - } - - public interface UnlockRowRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required uint64 lockId = 2; - boolean hasLockId(); - long getLockId(); - } - public static final class UnlockRowRequest extends - com.google.protobuf.GeneratedMessage - implements UnlockRowRequestOrBuilder { - // Use UnlockRowRequest.newBuilder() to construct. - private UnlockRowRequest(Builder builder) { - super(builder); - } - private UnlockRowRequest(boolean noInit) {} - - private static final UnlockRowRequest defaultInstance; - public static UnlockRowRequest getDefaultInstance() { - return defaultInstance; - } - - public UnlockRowRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required uint64 lockId = 2; - public static final int LOCKID_FIELD_NUMBER = 2; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getLockId() { - return lockId_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - lockId_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasLockId()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, lockId_); - } - getUnknownFields().writeTo(output); - } 
- - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, lockId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.lockId_ = lockId_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasLockId()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - lockId_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required uint64 lockId = 2; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000002; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000002); - lockId_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:UnlockRowRequest) - } - - static { - defaultInstance = new UnlockRowRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UnlockRowRequest) - } - - public interface UnlockRowResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class UnlockRowResponse extends - com.google.protobuf.GeneratedMessage - implements UnlockRowResponseOrBuilder { - // Use UnlockRowResponse.newBuilder() to construct. - private UnlockRowResponse(Builder builder) { - super(builder); - } - private UnlockRowResponse(boolean noInit) {} - - private static final UnlockRowResponse defaultInstance; - public static UnlockRowResponse getDefaultInstance() { - return defaultInstance; - } - - public UnlockRowResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return 
result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder 
newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:UnlockRowResponse) - } - - static { - defaultInstance = new UnlockRowResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UnlockRowResponse) - } - - public interface BulkLoadHFileRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - java.util.List - getFamilyPathList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); - int getFamilyPathCount(); - java.util.List - getFamilyPathOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index); - - // optional bool assignSeqNum = 3; - boolean hasAssignSeqNum(); - boolean getAssignSeqNum(); - } - public static final class BulkLoadHFileRequest extends - com.google.protobuf.GeneratedMessage - implements BulkLoadHFileRequestOrBuilder { - // Use BulkLoadHFileRequest.newBuilder() to construct. - private BulkLoadHFileRequest(Builder builder) { - super(builder); - } - private BulkLoadHFileRequest(boolean noInit) {} - - private static final BulkLoadHFileRequest defaultInstance; - public static BulkLoadHFileRequest getDefaultInstance() { - return defaultInstance; - } - - public BulkLoadHFileRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; - } - - public interface FamilyPathOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // required string path = 2; - boolean hasPath(); - String getPath(); - } - public static final class FamilyPath extends - com.google.protobuf.GeneratedMessage - implements FamilyPathOrBuilder { - // Use FamilyPath.newBuilder() to construct. 
- private FamilyPath(Builder builder) { - super(builder); - } - private FamilyPath(boolean noInit) {} - - private static final FamilyPath defaultInstance; - public static FamilyPath getDefaultInstance() { - return defaultInstance; - } - - public FamilyPath getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required string path = 2; - public static final int PATH_FIELD_NUMBER = 2; - private java.lang.Object path_; - public boolean hasPath() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getPath() { - java.lang.Object ref = path_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - path_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getPathBytes() { - java.lang.Object ref = path_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - path_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - path_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasPath()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getPathBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getPathBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final 
java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasPath() == other.hasPath()); - if (hasPath()) { - result = result && getPath() - .equals(other.getPath()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasPath()) { - hash = (37 * hash) + PATH_FIELD_NUMBER; - hash = (53 * hash) + getPath().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - path_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - 
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.path_ = path_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasPath()) { - setPath(other.getPath()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - if (!hasPath()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - path_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - 
onChanged(); - return this; - } - - // required string path = 2; - private java.lang.Object path_ = ""; - public boolean hasPath() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getPath() { - java.lang.Object ref = path_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - path_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setPath(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - path_ = value; - onChanged(); - return this; - } - public Builder clearPath() { - bitField0_ = (bitField0_ & ~0x00000002); - path_ = getDefaultInstance().getPath(); - onChanged(); - return this; - } - void setPath(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - path_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath) - } - - static { - defaultInstance = new FamilyPath(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath) - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - public static final int FAMILYPATH_FIELD_NUMBER = 2; - private java.util.List familyPath_; - public java.util.List getFamilyPathList() { - return familyPath_; - } - public java.util.List - getFamilyPathOrBuilderList() { - return familyPath_; - } - public int getFamilyPathCount() { - return familyPath_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { - return familyPath_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index) { - return familyPath_.get(index); - } - - // optional bool assignSeqNum = 3; - public static final int ASSIGNSEQNUM_FIELD_NUMBER = 3; - private boolean assignSeqNum_; - public boolean hasAssignSeqNum() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getAssignSeqNum() { - return assignSeqNum_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - familyPath_ = java.util.Collections.emptyList(); - assignSeqNum_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getFamilyPathCount(); i++) { - if (!getFamilyPath(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - 
getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < familyPath_.size(); i++) { - output.writeMessage(2, familyPath_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(3, assignSeqNum_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - for (int i = 0; i < familyPath_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, familyPath_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, assignSeqNum_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getFamilyPathList() - .equals(other.getFamilyPathList()); - result = result && (hasAssignSeqNum() == other.hasAssignSeqNum()); - if (hasAssignSeqNum()) { - result = result && (getAssignSeqNum() - == other.getAssignSeqNum()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getFamilyPathCount() > 0) { - hash = (37 * hash) + FAMILYPATH_FIELD_NUMBER; - hash = (53 * hash) + getFamilyPathList().hashCode(); - } - if (hasAssignSeqNum()) { - hash = (37 * hash) + ASSIGNSEQNUM_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAssignSeqNum()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { 
- return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private 
Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getFamilyPathFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (familyPathBuilder_ == null) { - familyPath_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - familyPathBuilder_.clear(); - } - assignSeqNum_ = false; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (familyPathBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - familyPath_ = java.util.Collections.unmodifiableList(familyPath_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.familyPath_ = familyPath_; - } else { - result.familyPath_ = familyPathBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.assignSeqNum_ = assignSeqNum_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (familyPathBuilder_ == null) { - if (!other.familyPath_.isEmpty()) { - if (familyPath_.isEmpty()) { - familyPath_ = other.familyPath_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureFamilyPathIsMutable(); - familyPath_.addAll(other.familyPath_); - } - onChanged(); - } - } else { - if (!other.familyPath_.isEmpty()) { - if (familyPathBuilder_.isEmpty()) { - familyPathBuilder_.dispose(); - familyPathBuilder_ = null; - familyPath_ = other.familyPath_; - bitField0_ = (bitField0_ & ~0x00000002); - familyPathBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getFamilyPathFieldBuilder() : null; - } else { - familyPathBuilder_.addAllMessages(other.familyPath_); - } - } - } - if (other.hasAssignSeqNum()) { - setAssignSeqNum(other.getAssignSeqNum()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - for (int i = 0; i < getFamilyPathCount(); i++) { - if (!getFamilyPath(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyPath(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - assignSeqNum_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - private java.util.List familyPath_ = - java.util.Collections.emptyList(); - private void ensureFamilyPathIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyPath_ = new java.util.ArrayList(familyPath_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; - - public java.util.List getFamilyPathList() { - if (familyPathBuilder_ == null) { - return java.util.Collections.unmodifiableList(familyPath_); - } else { - return familyPathBuilder_.getMessageList(); - } - } - public int getFamilyPathCount() { - if (familyPathBuilder_ == null) { - return familyPath_.size(); - } else { - return familyPathBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { - if (familyPathBuilder_ == null) { - return familyPath_.get(index); - } else { - return familyPathBuilder_.getMessage(index); - } - } - public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.set(index, value); - onChanged(); - } else { - familyPathBuilder_.setMessage(index, value); - } - return this; - } - public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.set(index, builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.add(value); - onChanged(); - } else { - familyPathBuilder_.addMessage(value); - } - return this; - } - public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.add(index, value); - onChanged(); - } else { - familyPathBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFamilyPath( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.add(builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.add(index, builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllFamilyPath( - java.lang.Iterable values) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - super.addAll(values, familyPath_); - onChanged(); - } else { - familyPathBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFamilyPath() { - if (familyPathBuilder_ == null) { - familyPath_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - 
familyPathBuilder_.clear(); - } - return this; - } - public Builder removeFamilyPath(int index) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.remove(index); - onChanged(); - } else { - familyPathBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( - int index) { - return getFamilyPathFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index) { - if (familyPathBuilder_ == null) { - return familyPath_.get(index); } else { - return familyPathBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFamilyPathOrBuilderList() { - if (familyPathBuilder_ != null) { - return familyPathBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(familyPath_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { - return getFamilyPathFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( - int index) { - return getFamilyPathFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); - } - public java.util.List - getFamilyPathBuilderList() { - return getFamilyPathFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> - getFamilyPathFieldBuilder() { - if (familyPathBuilder_ == null) { - familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( - familyPath_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - familyPath_ = null; - } - return familyPathBuilder_; - } - - // optional bool assignSeqNum = 3; - private boolean assignSeqNum_ ; - public boolean hasAssignSeqNum() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getAssignSeqNum() { - return assignSeqNum_; - } - public Builder setAssignSeqNum(boolean value) { - bitField0_ |= 0x00000004; - assignSeqNum_ = value; - onChanged(); - return this; - } - public Builder clearAssignSeqNum() { - bitField0_ = (bitField0_ & ~0x00000004); - assignSeqNum_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest) - } - - static { - defaultInstance = new BulkLoadHFileRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest) - } - - public interface BulkLoadHFileResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool loaded = 1; - boolean hasLoaded(); - 
boolean getLoaded(); - } - public static final class BulkLoadHFileResponse extends - com.google.protobuf.GeneratedMessage - implements BulkLoadHFileResponseOrBuilder { - // Use BulkLoadHFileResponse.newBuilder() to construct. - private BulkLoadHFileResponse(Builder builder) { - super(builder); - } - private BulkLoadHFileResponse(boolean noInit) {} - - private static final BulkLoadHFileResponse defaultInstance; - public static BulkLoadHFileResponse getDefaultInstance() { - return defaultInstance; - } - - public BulkLoadHFileResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool loaded = 1; - public static final int LOADED_FIELD_NUMBER = 1; - private boolean loaded_; - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getLoaded() { - return loaded_; - } - - private void initFields() { - loaded_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLoaded()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, loaded_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, loaded_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; - - boolean result = true; - result = result && (hasLoaded() == other.hasLoaded()); - if (hasLoaded()) { - result = result && (getLoaded() - == other.getLoaded()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLoaded()) { - hash = (37 * hash) + LOADED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLoaded()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder 
extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - loaded_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.loaded_ = loaded_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; - if 
(other.hasLoaded()) { - setLoaded(other.getLoaded()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLoaded()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - loaded_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool loaded = 1; - private boolean loaded_ ; - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getLoaded() { - return loaded_; - } - public Builder setLoaded(boolean value) { - bitField0_ |= 0x00000001; - loaded_ = value; - onChanged(); - return this; - } - public Builder clearLoaded() { - bitField0_ = (bitField0_ & ~0x00000001); - loaded_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse) - } - - static { - defaultInstance = new BulkLoadHFileResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) - } - - public interface ExecOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // required string protocolName = 2; - boolean hasProtocolName(); - String getProtocolName(); - - // required string methodName = 3; - boolean hasMethodName(); - String getMethodName(); - - // repeated .NameStringPair property = 4; - java.util.List - getPropertyList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index); - int getPropertyCount(); - java.util.List - getPropertyOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( - int index); - - // repeated .NameBytesPair parameter = 5; - java.util.List - getParameterList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index); - int getParameterCount(); - java.util.List - getParameterOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( - int index); - } - public static final class Exec extends - com.google.protobuf.GeneratedMessage - implements ExecOrBuilder { - // Use Exec.newBuilder() to construct. 
- private Exec(Builder builder) { - super(builder); - } - private Exec(boolean noInit) {} - - private static final Exec defaultInstance; - public static Exec getDefaultInstance() { - return defaultInstance; - } - - public Exec getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required string protocolName = 2; - public static final int PROTOCOLNAME_FIELD_NUMBER = 2; - private java.lang.Object protocolName_; - public boolean hasProtocolName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getProtocolName() { - java.lang.Object ref = protocolName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - protocolName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getProtocolNameBytes() { - java.lang.Object ref = protocolName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - protocolName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string methodName = 3; - public static final int METHODNAME_FIELD_NUMBER = 3; - private java.lang.Object methodName_; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - methodName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getMethodNameBytes() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - methodName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // repeated .NameStringPair property = 4; - public static final int PROPERTY_FIELD_NUMBER = 4; - private java.util.List property_; - public java.util.List getPropertyList() { - return property_; - } - public java.util.List - getPropertyOrBuilderList() { - return property_; - } - public int getPropertyCount() { - return property_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { - return property_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( - int index) { - return property_.get(index); - } - - // repeated .NameBytesPair parameter = 5; - public static final int PARAMETER_FIELD_NUMBER = 
5; - private java.util.List parameter_; - public java.util.List getParameterList() { - return parameter_; - } - public java.util.List - getParameterOrBuilderList() { - return parameter_; - } - public int getParameterCount() { - return parameter_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { - return parameter_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( - int index) { - return parameter_.get(index); - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - protocolName_ = ""; - methodName_ = ""; - property_ = java.util.Collections.emptyList(); - parameter_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasProtocolName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMethodName()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getPropertyCount(); i++) { - if (!getProperty(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getParameterCount(); i++) { - if (!getParameter(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getProtocolNameBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getMethodNameBytes()); - } - for (int i = 0; i < property_.size(); i++) { - output.writeMessage(4, property_.get(i)); - } - for (int i = 0; i < parameter_.size(); i++) { - output.writeMessage(5, parameter_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getProtocolNameBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getMethodNameBytes()); - } - for (int i = 0; i < property_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, property_.get(i)); - } - for (int i = 0; i < parameter_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, parameter_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasProtocolName() == other.hasProtocolName()); - if (hasProtocolName()) { - result = result && getProtocolName() - .equals(other.getProtocolName()); - } - result = result && (hasMethodName() == other.hasMethodName()); - if (hasMethodName()) { - result = result && getMethodName() - .equals(other.getMethodName()); - } - result = result && getPropertyList() - .equals(other.getPropertyList()); - result = result && getParameterList() - .equals(other.getParameterList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasProtocolName()) { - hash = (37 * hash) + PROTOCOLNAME_FIELD_NUMBER; - hash = (53 * hash) + getProtocolName().hashCode(); - } - if (hasMethodName()) { - hash = (37 * hash) + METHODNAME_FIELD_NUMBER; - hash = (53 * hash) + getMethodName().hashCode(); - } - if (getPropertyCount() > 0) { - hash = (37 * hash) + PROPERTY_FIELD_NUMBER; - hash = (53 * hash) + getPropertyList().hashCode(); - } - if (getParameterCount() > 0) { - hash = (37 * hash) + PARAMETER_FIELD_NUMBER; - hash = (53 * hash) + getParameterList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) - throws 
java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPropertyFieldBuilder(); - getParameterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - protocolName_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - methodName_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); - if (propertyBuilder_ == null) { - property_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - } else { - propertyBuilder_.clear(); - } - if (parameterBuilder_ == null) { - parameter_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - } else { - parameterBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.protocolName_ = protocolName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.methodName_ = methodName_; - if (propertyBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008)) { - property_ = java.util.Collections.unmodifiableList(property_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.property_ = property_; - } else { - result.property_ = propertyBuilder_.build(); - } - if (parameterBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010)) { - parameter_ = java.util.Collections.unmodifiableList(parameter_); - bitField0_ = (bitField0_ & ~0x00000010); - } - result.parameter_ = parameter_; - } else { - result.parameter_ = parameterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasProtocolName()) { - setProtocolName(other.getProtocolName()); - } - if (other.hasMethodName()) { - setMethodName(other.getMethodName()); - } - if (propertyBuilder_ == null) { - if (!other.property_.isEmpty()) { - if (property_.isEmpty()) { - property_ = other.property_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensurePropertyIsMutable(); - property_.addAll(other.property_); - } - onChanged(); - } - } else { - if (!other.property_.isEmpty()) { - if (propertyBuilder_.isEmpty()) { - propertyBuilder_.dispose(); - propertyBuilder_ = null; - property_ = other.property_; - bitField0_ = (bitField0_ & ~0x00000008); - propertyBuilder_ = - 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getPropertyFieldBuilder() : null; - } else { - propertyBuilder_.addAllMessages(other.property_); - } - } - } - if (parameterBuilder_ == null) { - if (!other.parameter_.isEmpty()) { - if (parameter_.isEmpty()) { - parameter_ = other.parameter_; - bitField0_ = (bitField0_ & ~0x00000010); - } else { - ensureParameterIsMutable(); - parameter_.addAll(other.parameter_); - } - onChanged(); - } - } else { - if (!other.parameter_.isEmpty()) { - if (parameterBuilder_.isEmpty()) { - parameterBuilder_.dispose(); - parameterBuilder_ = null; - parameter_ = other.parameter_; - bitField0_ = (bitField0_ & ~0x00000010); - parameterBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getParameterFieldBuilder() : null; - } else { - parameterBuilder_.addAllMessages(other.parameter_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasProtocolName()) { - - return false; - } - if (!hasMethodName()) { - - return false; - } - for (int i = 0; i < getPropertyCount(); i++) { - if (!getProperty(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getParameterCount(); i++) { - if (!getParameter(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - protocolName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addProperty(subBuilder.buildPartial()); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addParameter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // 
required string protocolName = 2; - private java.lang.Object protocolName_ = ""; - public boolean hasProtocolName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getProtocolName() { - java.lang.Object ref = protocolName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - protocolName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setProtocolName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - protocolName_ = value; - onChanged(); - return this; - } - public Builder clearProtocolName() { - bitField0_ = (bitField0_ & ~0x00000002); - protocolName_ = getDefaultInstance().getProtocolName(); - onChanged(); - return this; - } - void setProtocolName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - protocolName_ = value; - onChanged(); - } - - // required string methodName = 3; - private java.lang.Object methodName_ = ""; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - methodName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setMethodName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - methodName_ = value; - onChanged(); - return this; - } - public Builder clearMethodName() { - bitField0_ = (bitField0_ & ~0x00000004); - methodName_ = getDefaultInstance().getMethodName(); - onChanged(); - return this; - } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; - methodName_ = value; - onChanged(); - } - - // repeated .NameStringPair property = 4; - private java.util.List property_ = - java.util.Collections.emptyList(); - private void ensurePropertyIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - property_ = new java.util.ArrayList(property_); - bitField0_ |= 0x00000008; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> propertyBuilder_; - - public java.util.List getPropertyList() { - if (propertyBuilder_ == null) { - return java.util.Collections.unmodifiableList(property_); - } else { - return propertyBuilder_.getMessageList(); - } - } - public int getPropertyCount() { - if (propertyBuilder_ == null) { - return property_.size(); - } else { - return propertyBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { - if (propertyBuilder_ == null) { - return property_.get(index); - } else { - return propertyBuilder_.getMessage(index); - } - } - public Builder setProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { - if (propertyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePropertyIsMutable(); - property_.set(index, value); - onChanged(); - } else { - propertyBuilder_.setMessage(index, value); - } - return this; - } - public Builder setProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) 
{ - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.set(index, builderForValue.build()); - onChanged(); - } else { - propertyBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { - if (propertyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePropertyIsMutable(); - property_.add(value); - onChanged(); - } else { - propertyBuilder_.addMessage(value); - } - return this; - } - public Builder addProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { - if (propertyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePropertyIsMutable(); - property_.add(index, value); - onChanged(); - } else { - propertyBuilder_.addMessage(index, value); - } - return this; - } - public Builder addProperty( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.add(builderForValue.build()); - onChanged(); - } else { - propertyBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.add(index, builderForValue.build()); - onChanged(); - } else { - propertyBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllProperty( - java.lang.Iterable values) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - super.addAll(values, property_); - onChanged(); - } else { - propertyBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearProperty() { - if (propertyBuilder_ == null) { - property_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - propertyBuilder_.clear(); - } - return this; - } - public Builder removeProperty(int index) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.remove(index); - onChanged(); - } else { - propertyBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getPropertyBuilder( - int index) { - return getPropertyFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( - int index) { - if (propertyBuilder_ == null) { - return property_.get(index); } else { - return propertyBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getPropertyOrBuilderList() { - if (propertyBuilder_ != null) { - return propertyBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(property_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder() { - return getPropertyFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder( - int index) { - return getPropertyFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); - } - 
public java.util.List - getPropertyBuilderList() { - return getPropertyFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> - getPropertyFieldBuilder() { - if (propertyBuilder_ == null) { - propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( - property_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - property_ = null; - } - return propertyBuilder_; - } - - // repeated .NameBytesPair parameter = 5; - private java.util.List parameter_ = - java.util.Collections.emptyList(); - private void ensureParameterIsMutable() { - if (!((bitField0_ & 0x00000010) == 0x00000010)) { - parameter_ = new java.util.ArrayList(parameter_); - bitField0_ |= 0x00000010; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> parameterBuilder_; - - public java.util.List getParameterList() { - if (parameterBuilder_ == null) { - return java.util.Collections.unmodifiableList(parameter_); - } else { - return parameterBuilder_.getMessageList(); - } - } - public int getParameterCount() { - if (parameterBuilder_ == null) { - return parameter_.size(); - } else { - return parameterBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { - if (parameterBuilder_ == null) { - return parameter_.get(index); - } else { - return parameterBuilder_.getMessage(index); - } - } - public Builder setParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (parameterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureParameterIsMutable(); - parameter_.set(index, value); - onChanged(); - } else { - parameterBuilder_.setMessage(index, value); - } - return this; - } - public Builder setParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.set(index, builderForValue.build()); - onChanged(); - } else { - parameterBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (parameterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureParameterIsMutable(); - parameter_.add(value); - onChanged(); - } else { - parameterBuilder_.addMessage(value); - } - return this; - } - public Builder addParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (parameterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureParameterIsMutable(); - parameter_.add(index, value); - onChanged(); - } else { - parameterBuilder_.addMessage(index, 
value); - } - return this; - } - public Builder addParameter( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.add(builderForValue.build()); - onChanged(); - } else { - parameterBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.add(index, builderForValue.build()); - onChanged(); - } else { - parameterBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllParameter( - java.lang.Iterable values) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - super.addAll(values, parameter_); - onChanged(); - } else { - parameterBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearParameter() { - if (parameterBuilder_ == null) { - parameter_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - onChanged(); - } else { - parameterBuilder_.clear(); - } - return this; - } - public Builder removeParameter(int index) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.remove(index); - onChanged(); - } else { - parameterBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getParameterBuilder( - int index) { - return getParameterFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( - int index) { - if (parameterBuilder_ == null) { - return parameter_.get(index); } else { - return parameterBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getParameterOrBuilderList() { - if (parameterBuilder_ != null) { - return parameterBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(parameter_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder() { - return getParameterFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder( - int index) { - return getParameterFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); - } - public java.util.List - getParameterBuilderList() { - return getParameterFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getParameterFieldBuilder() { - if (parameterBuilder_ == null) { - parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - parameter_, - ((bitField0_ & 0x00000010) == 0x00000010), - getParentForChildren(), - isClean()); - parameter_ = null; - } - 
return parameterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Exec) - } - - static { - defaultInstance = new Exec(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Exec) - } - - public interface ExecCoprocessorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .Exec call = 2; - boolean hasCall(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder(); - } - public static final class ExecCoprocessorRequest extends - com.google.protobuf.GeneratedMessage - implements ExecCoprocessorRequestOrBuilder { - // Use ExecCoprocessorRequest.newBuilder() to construct. - private ExecCoprocessorRequest(Builder builder) { - super(builder); - } - private ExecCoprocessorRequest(boolean noInit) {} - - private static final ExecCoprocessorRequest defaultInstance; - public static ExecCoprocessorRequest getDefaultInstance() { - return defaultInstance; - } - - public ExecCoprocessorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .Exec call = 2; - public static final int CALL_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_; - public boolean hasCall() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { - return call_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { - return call_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCall()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCall().isInitialized()) { - memoizedIsInitialized = 
0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, call_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, call_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasCall() == other.hasCall()); - if (hasCall()) { - result = result && getCall() - .equals(other.getCall()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasCall()) { - hash = (37 * hash) + CALL_FIELD_NUMBER; - hash = (53 * hash) + getCall().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getCallFieldBuilder(); - } - } - private static Builder create() { - return new 
Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - } else { - callBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (callBuilder_ == null) { - result.call_ = call_; - } else { - result.call_ = callBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasCall()) { - mergeCall(other.getCall()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasCall()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if 
(!getCall().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(); - if (hasCall()) { - subBuilder.mergeFrom(getCall()); - } - input.readMessage(subBuilder, extensionRegistry); - setCall(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if 
(regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .Exec call = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> callBuilder_; - public boolean hasCall() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { - if (callBuilder_ == null) { - return call_; - } else { - return callBuilder_.getMessage(); - } - } - public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { - if (callBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - call_ = value; - onChanged(); - } else { - callBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setCall( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) { - if (callBuilder_ == null) { - call_ = builderForValue.build(); - onChanged(); - } else { - callBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { - if (callBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) { - call_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); - } else { - call_ = value; - } - onChanged(); - } else { - callBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearCall() { - if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - onChanged(); - } else { - callBuilder_.clear(); - } - bitField0_ = 
(bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getCallBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getCallFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { - if (callBuilder_ != null) { - return callBuilder_.getMessageOrBuilder(); - } else { - return call_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> - getCallFieldBuilder() { - if (callBuilder_ == null) { - callBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>( - call_, - getParentForChildren(), - isClean()); - call_ = null; - } - return callBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ExecCoprocessorRequest) - } - - static { - defaultInstance = new ExecCoprocessorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ExecCoprocessorRequest) - } - - public interface ExecCoprocessorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .NameBytesPair value = 1; - boolean hasValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); - } - public static final class ExecCoprocessorResponse extends - com.google.protobuf.GeneratedMessage - implements ExecCoprocessorResponseOrBuilder { - // Use ExecCoprocessorResponse.newBuilder() to construct. 
- private ExecCoprocessorResponse(Builder builder) { - super(builder); - } - private ExecCoprocessorResponse(boolean noInit) {} - - private static final ExecCoprocessorResponse defaultInstance; - public static ExecCoprocessorResponse getDefaultInstance() { - return defaultInstance; - } - - public ExecCoprocessorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; - } - - private int bitField0_; - // required .NameBytesPair value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - return value_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - return value_; - } - - private void initFields() { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - if (!getValue().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) obj; - - boolean result = true; - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + 
getDescriptorForType().hashCode(); - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() 
{ return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getValueFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (valueBuilder_ == null) { - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this; - if (other.hasValue()) { - mergeValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasValue()) { - - return false; - } - if (!getValue().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - if (hasValue()) { - subBuilder.mergeFrom(getValue()); - } - input.readMessage(subBuilder, extensionRegistry); - setValue(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .NameBytesPair value = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - if (valueBuilder_ == null) { - return value_; - } else { - return valueBuilder_.getMessage(); - } - } - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - value_ = value; - onChanged(); - } else { - valueBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); - onChanged(); - } else { - valueBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - value_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { - value_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); - } else { - value_ = value; - } - onChanged(); - } else { - valueBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - onChanged(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getValueFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); - } else { - return value_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - value_, - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ExecCoprocessorResponse) - } - - static { - defaultInstance = new ExecCoprocessorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse) - } - - public interface CoprocessorServiceCallOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // required string serviceName = 2; - boolean hasServiceName(); - String getServiceName(); - - // required string methodName = 3; - boolean hasMethodName(); - String getMethodName(); - - // required bytes request = 4; - boolean hasRequest(); - com.google.protobuf.ByteString getRequest(); - } - public static final class CoprocessorServiceCall extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceCallOrBuilder { - // Use CoprocessorServiceCall.newBuilder() to construct. 
- private CoprocessorServiceCall(Builder builder) { - super(builder); - } - private CoprocessorServiceCall(boolean noInit) {} - - private static final CoprocessorServiceCall defaultInstance; - public static CoprocessorServiceCall getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceCall getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required string serviceName = 2; - public static final int SERVICENAME_FIELD_NUMBER = 2; - private java.lang.Object serviceName_; - public boolean hasServiceName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getServiceName() { - java.lang.Object ref = serviceName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - serviceName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getServiceNameBytes() { - java.lang.Object ref = serviceName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - serviceName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string methodName = 3; - public static final int METHODNAME_FIELD_NUMBER = 3; - private java.lang.Object methodName_; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - methodName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getMethodNameBytes() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - methodName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required bytes request = 4; - public static final int REQUEST_FIELD_NUMBER = 4; - private com.google.protobuf.ByteString request_; - public boolean hasRequest() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - serviceName_ = ""; - methodName_ = ""; - request_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if 
(isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasServiceName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMethodName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRequest()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getServiceNameBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, request_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getServiceNameBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, request_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasServiceName() == other.hasServiceName()); - if (hasServiceName()) { - result = result && getServiceName() - .equals(other.getServiceName()); - } - result = result && (hasMethodName() == other.hasMethodName()); - if (hasMethodName()) { - result = result && getMethodName() - .equals(other.getMethodName()); - } - result = result && (hasRequest() == other.hasRequest()); - if (hasRequest()) { - result = result && getRequest() - .equals(other.getRequest()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasServiceName()) { - hash = (37 * hash) + SERVICENAME_FIELD_NUMBER; - hash = (53 * hash) + getServiceName().hashCode(); - } - if (hasMethodName()) { - hash = (37 * hash) + 
METHODNAME_FIELD_NUMBER; - hash = (53 * hash) + getMethodName().hashCode(); - } - if (hasRequest()) { - hash = (37 * hash) + REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getRequest().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) { - return 
newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - serviceName_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - methodName_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); - request_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } 
- result.serviceName_ = serviceName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.methodName_ = methodName_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.request_ = request_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasServiceName()) { - setServiceName(other.getServiceName()); - } - if (other.hasMethodName()) { - setMethodName(other.getMethodName()); - } - if (other.hasRequest()) { - setRequest(other.getRequest()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasServiceName()) { - - return false; - } - if (!hasMethodName()) { - - return false; - } - if (!hasRequest()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - serviceName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - request_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required string serviceName = 2; - private java.lang.Object serviceName_ = ""; - public boolean hasServiceName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getServiceName() { - java.lang.Object ref = serviceName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - serviceName_ 
= s; - return s; - } else { - return (String) ref; - } - } - public Builder setServiceName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - serviceName_ = value; - onChanged(); - return this; - } - public Builder clearServiceName() { - bitField0_ = (bitField0_ & ~0x00000002); - serviceName_ = getDefaultInstance().getServiceName(); - onChanged(); - return this; - } - void setServiceName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - serviceName_ = value; - onChanged(); - } - - // required string methodName = 3; - private java.lang.Object methodName_ = ""; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - methodName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setMethodName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - methodName_ = value; - onChanged(); - return this; - } - public Builder clearMethodName() { - bitField0_ = (bitField0_ & ~0x00000004); - methodName_ = getDefaultInstance().getMethodName(); - onChanged(); - return this; - } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; - methodName_ = value; - onChanged(); - } - - // required bytes request = 4; - private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRequest() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - public Builder setRequest(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - request_ = value; - onChanged(); - return this; - } - public Builder clearRequest() { - bitField0_ = (bitField0_ & ~0x00000008); - request_ = getDefaultInstance().getRequest(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CoprocessorServiceCall) - } - - static { - defaultInstance = new CoprocessorServiceCall(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CoprocessorServiceCall) - } - - public interface CoprocessorServiceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .CoprocessorServiceCall call = 2; - boolean hasCall(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder(); - } - public static final class CoprocessorServiceRequest extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceRequestOrBuilder { - // Use CoprocessorServiceRequest.newBuilder() to construct. 
- private CoprocessorServiceRequest(Builder builder) { - super(builder); - } - private CoprocessorServiceRequest(boolean noInit) {} - - private static final CoprocessorServiceRequest defaultInstance; - public static CoprocessorServiceRequest getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .CoprocessorServiceCall call = 2; - public static final int CALL_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_; - public boolean hasCall() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { - return call_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { - return call_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCall()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCall().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, call_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, call_); - } - size += getUnknownFields().getSerializedSize(); 
- memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasCall() == other.hasCall()); - if (hasCall()) { - result = result && getCall() - .equals(other.getCall()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasCall()) { - hash = (37 * hash) + CALL_FIELD_NUMBER; - hash = (53 * hash) + getCall().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } 
- } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getCallFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - } else { - callBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDescriptor(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (callBuilder_ == null) { - result.call_ = call_; - } else { - result.call_ = callBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasCall()) { - mergeCall(other.getCall()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasCall()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getCall().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(); - if (hasCall()) { - subBuilder.mergeFrom(getCall()); - } - input.readMessage(subBuilder, extensionRegistry); - setCall(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .CoprocessorServiceCall call = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_; - public boolean hasCall() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { - if (callBuilder_ == null) { - return call_; - } else { - return callBuilder_.getMessage(); - } - } - public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { - if (callBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - call_ = value; - onChanged(); - } else { - callBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setCall( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) { - if (callBuilder_ == null) { - call_ = builderForValue.build(); - onChanged(); - } else { - callBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { - if (callBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) { - call_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial(); - } else { - call_ = value; - } - onChanged(); - } else { - callBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearCall() { - if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - onChanged(); - } else { - callBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getCallFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { - if (callBuilder_ != null) { - return callBuilder_.getMessageOrBuilder(); - } else { - 
return call_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> - getCallFieldBuilder() { - if (callBuilder_ == null) { - callBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>( - call_, - getParentForChildren(), - isClean()); - call_ = null; - } - return callBuilder_; - } - - // @@protoc_insertion_point(builder_scope:CoprocessorServiceRequest) - } - - static { - defaultInstance = new CoprocessorServiceRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CoprocessorServiceRequest) - } - - public interface CoprocessorServiceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .NameBytesPair value = 2; - boolean hasValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); - } - public static final class CoprocessorServiceResponse extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceResponseOrBuilder { - // Use CoprocessorServiceResponse.newBuilder() to construct. 
- private CoprocessorServiceResponse(Builder builder) { - super(builder); - } - private CoprocessorServiceResponse(boolean noInit) {} - - private static final CoprocessorServiceResponse defaultInstance; - public static CoprocessorServiceResponse getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .NameBytesPair value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - return value_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - return value_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getValue().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = 
size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - 
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getValueFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDescriptor(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (valueBuilder_ == null) { - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasValue()) { - mergeValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasValue()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getValue().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - if (hasValue()) { - subBuilder.mergeFrom(getValue()); - } - input.readMessage(subBuilder, extensionRegistry); - setValue(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .NameBytesPair value = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - if (valueBuilder_ == null) { - return value_; - } else { - return valueBuilder_.getMessage(); - } - } - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - value_ = value; - onChanged(); - } else { - valueBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); - onChanged(); - } else { - valueBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { - value_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); - } else { - value_ = value; - } - onChanged(); - } else { - valueBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - onChanged(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getValueFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); - } else { - return value_; - } - } - private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - value_, - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } - - // @@protoc_insertion_point(builder_scope:CoprocessorServiceResponse) - } - - static { - defaultInstance = new CoprocessorServiceResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CoprocessorServiceResponse) - } - - public interface MultiActionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .Mutate mutate = 1; - boolean hasMutate(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder(); - - // optional .Get get = 2; - boolean hasGet(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); - - // optional .Exec exec = 3; - boolean hasExec(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder(); - } - public static final class MultiAction extends - com.google.protobuf.GeneratedMessage - implements MultiActionOrBuilder { - // Use MultiAction.newBuilder() to construct. 
- private MultiAction(Builder builder) { - super(builder); - } - private MultiAction(boolean noInit) {} - - private static final MultiAction defaultInstance; - public static MultiAction getDefaultInstance() { - return defaultInstance; - } - - public MultiAction getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable; - } - - private int bitField0_; - // optional .Mutate mutate = 1; - public static final int MUTATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_; - public boolean hasMutate() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { - return mutate_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { - return mutate_; - } - - // optional .Get get = 2; - public static final int GET_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { - return get_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - return get_; - } - - // optional .Exec exec = 3; - public static final int EXEC_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec exec_; - public boolean hasExec() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec() { - return exec_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder() { - return exec_; - } - - private void initFields() { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasMutate()) { - if (!getMutate().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasGet()) { - if (!getGet().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasExec()) { - if (!getExec().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, mutate_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, get_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, exec_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int 
getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, mutate_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, get_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, exec_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) obj; - - boolean result = true; - result = result && (hasMutate() == other.hasMutate()); - if (hasMutate()) { - result = result && getMutate() - .equals(other.getMutate()); - } - result = result && (hasGet() == other.hasGet()); - if (hasGet()) { - result = result && getGet() - .equals(other.getGet()); - } - result = result && (hasExec() == other.hasExec()); - if (hasExec()) { - result = result && getExec() - .equals(other.getExec()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMutate()) { - hash = (37 * hash) + MUTATE_FIELD_NUMBER; - hash = (53 * hash) + getMutate().hashCode(); - } - if (hasGet()) { - hash = (37 * hash) + GET_FIELD_NUMBER; - hash = (53 * hash) + getGet().hashCode(); - } - if (hasExec()) { - hash = (37 * hash) + EXEC_FIELD_NUMBER; - hash = (53 * hash) + getExec().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(java.io.InputStream input) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMutateFieldBuilder(); - getGetFieldBuilder(); - getExecFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - } else { - mutateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - 
if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - if (execBuilder_ == null) { - exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - } else { - execBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (mutateBuilder_ == null) { - result.mutate_ = mutate_; - } else { - result.mutate_ = mutateBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (getBuilder_ == null) { - result.get_ = get_; - } else { - result.get_ = getBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - if (execBuilder_ == null) { - result.exec_ = exec_; - } else { - result.exec_ = execBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()) return this; - if (other.hasMutate()) { - mergeMutate(other.getMutate()); - } - if (other.hasGet()) { - mergeGet(other.getGet()); - } - if (other.hasExec()) { - mergeExec(other.getExec()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasMutate()) { - if (!getMutate().isInitialized()) { - - return false; - } - } - if (hasGet()) { - if (!getGet().isInitialized()) { - - return 
false; - } - } - if (hasExec()) { - if (!getExec().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); - if (hasMutate()) { - subBuilder.mergeFrom(getMutate()); - } - input.readMessage(subBuilder, extensionRegistry); - setMutate(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); - if (hasGet()) { - subBuilder.mergeFrom(getGet()); - } - input.readMessage(subBuilder, extensionRegistry); - setGet(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(); - if (hasExec()) { - subBuilder.mergeFrom(getExec()); - } - input.readMessage(subBuilder, extensionRegistry); - setExec(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // optional .Mutate mutate = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutateBuilder_; - public boolean hasMutate() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { - if (mutateBuilder_ == null) { - return mutate_; - } else { - return mutateBuilder_.getMessage(); - } - } - public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { - if (mutateBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - mutate_ = value; - onChanged(); - } else { - mutateBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setMutate( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { - if (mutateBuilder_ == null) { - mutate_ = builderForValue.build(); - onChanged(); - } else { - mutateBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { - if (mutateBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - mutate_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) { - mutate_ = - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); - } else { - mutate_ = value; - } - onChanged(); - } else { - mutateBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearMutate() { - if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); - onChanged(); - } else { - mutateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutateBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getMutateFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { - if (mutateBuilder_ != null) { - return mutateBuilder_.getMessageOrBuilder(); - } else { - return mutate_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> - getMutateFieldBuilder() { - if (mutateBuilder_ == null) { - mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( - mutate_, - getParentForChildren(), - isClean()); - mutate_ = null; - } - return mutateBuilder_; - } - - // optional .Get get = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { - if (getBuilder_ == null) { - return get_; - } else { - return getBuilder_.getMessage(); - } - } - public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { - if (getBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - get_ = value; - onChanged(); - } else { - getBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setGet( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { - if (getBuilder_ == null) { - get_ = builderForValue.build(); - onChanged(); - } else { - getBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { - if (getBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { - get_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); - } else { - get_ = value; - } - onChanged(); - } else { - getBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearGet() { - if 
(getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - onChanged(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getGetFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - if (getBuilder_ != null) { - return getBuilder_.getMessageOrBuilder(); - } else { - return get_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> - getGetFieldBuilder() { - if (getBuilder_ == null) { - getBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( - get_, - getParentForChildren(), - isClean()); - get_ = null; - } - return getBuilder_; - } - - // optional .Exec exec = 3; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> execBuilder_; - public boolean hasExec() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec() { - if (execBuilder_ == null) { - return exec_; - } else { - return execBuilder_.getMessage(); - } - } - public Builder setExec(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { - if (execBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - exec_ = value; - onChanged(); - } else { - execBuilder_.setMessage(value); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder setExec( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) { - if (execBuilder_ == null) { - exec_ = builderForValue.build(); - onChanged(); - } else { - execBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder mergeExec(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { - if (execBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004) && - exec_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) { - exec_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(exec_).mergeFrom(value).buildPartial(); - } else { - exec_ = value; - } - onChanged(); - } else { - execBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder clearExec() { - if (execBuilder_ == null) { - exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); - onChanged(); - } else { - execBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getExecBuilder() { - 
bitField0_ |= 0x00000004; - onChanged(); - return getExecFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder() { - if (execBuilder_ != null) { - return execBuilder_.getMessageOrBuilder(); - } else { - return exec_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> - getExecFieldBuilder() { - if (execBuilder_ == null) { - execBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>( - exec_, - getParentForChildren(), - isClean()); - exec_ = null; - } - return execBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MultiAction) - } - - static { - defaultInstance = new MultiAction(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiAction) - } - - public interface ActionResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .NameBytesPair value = 1; - boolean hasValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); - - // optional .NameBytesPair exception = 2; - boolean hasException(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); - } - public static final class ActionResult extends - com.google.protobuf.GeneratedMessage - implements ActionResultOrBuilder { - // Use ActionResult.newBuilder() to construct. 
- private ActionResult(Builder builder) { - super(builder); - } - private ActionResult(boolean noInit) {} - - private static final ActionResult defaultInstance; - public static ActionResult getDefaultInstance() { - return defaultInstance; - } - - public ActionResult getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; - } - - private int bitField0_; - // optional .NameBytesPair value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - return value_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - return value_; - } - - // optional .NameBytesPair exception = 2; - public static final int EXCEPTION_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; - public boolean hasException() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { - return exception_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { - return exception_; - } - - private void initFields() { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasValue()) { - if (!getValue().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasException()) { - if (!getException().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, value_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, exception_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, value_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, exception_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) obj; - - boolean result = true; - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && (hasException() == other.hasException()); - if (hasException()) { - result = result && getException() - .equals(other.getException()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - if (hasException()) { - hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; - hash = (53 * hash) + getException().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getValueFieldBuilder(); - getExceptionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } else { - exceptionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (valueBuilder_ == null) { - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (exceptionBuilder_ == null) { - result.exception_ = exception_; - } else { - result.exception_ = exceptionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()) return this; - if (other.hasValue()) { - mergeValue(other.getValue()); - } - if (other.hasException()) { - mergeException(other.getException()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasValue()) { - if (!getValue().isInitialized()) { - - return false; - } - } - if (hasException()) { - if (!getException().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - if (hasValue()) { - subBuilder.mergeFrom(getValue()); - } - input.readMessage(subBuilder, extensionRegistry); - setValue(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); - if (hasException()) { - subBuilder.mergeFrom(getException()); - } - 
input.readMessage(subBuilder, extensionRegistry); - setException(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // optional .NameBytesPair value = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - if (valueBuilder_ == null) { - return value_; - } else { - return valueBuilder_.getMessage(); - } - } - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - value_ = value; - onChanged(); - } else { - valueBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); - onChanged(); - } else { - valueBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { - value_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); - } else { - value_ = value; - } - onChanged(); - } else { - valueBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - onChanged(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getValueFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); - } else { - return value_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - value_, - getParentForChildren(), - isClean()); - value_ 
= null; - } - return valueBuilder_; - } - - // optional .NameBytesPair exception = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; - public boolean hasException() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { - if (exceptionBuilder_ == null) { - return exception_; - } else { - return exceptionBuilder_.getMessage(); - } - } - public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (exceptionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - exception_ = value; - onChanged(); - } else { - exceptionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setException( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (exceptionBuilder_ == null) { - exception_ = builderForValue.build(); - onChanged(); - } else { - exceptionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (exceptionBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { - exception_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); - } else { - exception_ = value; - } - onChanged(); - } else { - exceptionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearException() { - if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - onChanged(); - } else { - exceptionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getExceptionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { - if (exceptionBuilder_ != null) { - return exceptionBuilder_.getMessageOrBuilder(); - } else { - return exception_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getExceptionFieldBuilder() { - if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - exception_, - 
getParentForChildren(), - isClean()); - exception_ = null; - } - return exceptionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ActionResult) - } - - static { - defaultInstance = new ActionResult(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ActionResult) - } - - public interface MultiRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated .MultiAction action = 2; - java.util.List - getActionList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index); - int getActionCount(); - java.util.List - getActionOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( - int index); - - // optional bool atomic = 3; - boolean hasAtomic(); - boolean getAtomic(); - } - public static final class MultiRequest extends - com.google.protobuf.GeneratedMessage - implements MultiRequestOrBuilder { - // Use MultiRequest.newBuilder() to construct. - private MultiRequest(Builder builder) { - super(builder); - } - private MultiRequest(boolean noInit) {} - - private static final MultiRequest defaultInstance; - public static MultiRequest getDefaultInstance() { - return defaultInstance; - } - - public MultiRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated .MultiAction action = 2; - public static final int ACTION_FIELD_NUMBER = 2; - private java.util.List action_; - public java.util.List getActionList() { - return action_; - } - public java.util.List - getActionOrBuilderList() { - return action_; - } - public int getActionCount() { - return action_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { - return action_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( - int index) { - return action_.get(index); - } - - // optional bool atomic = 3; - public static final int ATOMIC_FIELD_NUMBER = 3; - private boolean atomic_; - public boolean hasAtomic() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getAtomic() { - return atomic_; - } - - private void initFields() { - region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - action_ = java.util.Collections.emptyList(); - atomic_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getActionCount(); i++) { - if (!getAction(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < action_.size(); i++) { - output.writeMessage(2, action_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(3, atomic_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - for (int i = 0; i < action_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, action_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, atomic_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getActionList() - .equals(other.getActionList()); - result = result && (hasAtomic() == other.hasAtomic()); - if (hasAtomic()) { - result = result && (getAtomic() - == other.getAtomic()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getActionCount() > 0) { - hash = (37 * hash) + ACTION_FIELD_NUMBER; - hash = (53 * hash) + getActionList().hashCode(); - } - if (hasAtomic()) { - hash = (37 * hash) + ATOMIC_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAtomic()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( - 
com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { - public static final 
com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getActionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (actionBuilder_ == null) { - action_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - actionBuilder_.clear(); - } - atomic_ = false; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (actionBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - action_ = java.util.Collections.unmodifiableList(action_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.action_ = action_; - } else { - result.action_ = actionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.atomic_ = atomic_; - result.bitField0_ = to_bitField0_; - 
onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (actionBuilder_ == null) { - if (!other.action_.isEmpty()) { - if (action_.isEmpty()) { - action_ = other.action_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureActionIsMutable(); - action_.addAll(other.action_); - } - onChanged(); - } - } else { - if (!other.action_.isEmpty()) { - if (actionBuilder_.isEmpty()) { - actionBuilder_.dispose(); - actionBuilder_ = null; - action_ = other.action_; - bitField0_ = (bitField0_ & ~0x00000002); - actionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getActionFieldBuilder() : null; - } else { - actionBuilder_.addAllMessages(other.action_); - } - } - } - if (other.hasAtomic()) { - setAtomic(other.getAtomic()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - for (int i = 0; i < getActionCount(); i++) { - if (!getAction(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAction(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - atomic_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // repeated .MultiAction action = 2; - private java.util.List action_ = - java.util.Collections.emptyList(); - private void ensureActionIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - action_ = new java.util.ArrayList(action_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> actionBuilder_; - - public java.util.List getActionList() { - if (actionBuilder_ == null) { - return java.util.Collections.unmodifiableList(action_); - } else { - return actionBuilder_.getMessageList(); - } - } - public int getActionCount() { - if (actionBuilder_ == null) { - return action_.size(); - } else { - return actionBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { - if (actionBuilder_ == null) { - return action_.get(index); - } else { - return actionBuilder_.getMessage(index); - } - } - public Builder setAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { - if (actionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.set(index, value); - onChanged(); - } else { - actionBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.set(index, builderForValue.build()); - onChanged(); - } else { - actionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { - if (actionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.add(value); - onChanged(); - } else { - actionBuilder_.addMessage(value); - } - return this; - } - public Builder addAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { - if (actionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.add(index, value); - onChanged(); - } else { - actionBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAction( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.add(builderForValue.build()); - onChanged(); - } else { - actionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.add(index, builderForValue.build()); - onChanged(); - } else { - actionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAction( - java.lang.Iterable values) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - super.addAll(values, action_); - onChanged(); - } else { - actionBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAction() { - if (actionBuilder_ == null) { - action_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - actionBuilder_.clear(); - } - return this; - } - public Builder removeAction(int index) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.remove(index); - onChanged(); - } else { - actionBuilder_.remove(index); - } - return this; - } - public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder getActionBuilder( - int index) { - return getActionFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( - int index) { - if (actionBuilder_ == null) { - return action_.get(index); } else { - return actionBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getActionOrBuilderList() { - if (actionBuilder_ != null) { - return actionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(action_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder() { - return getActionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder( - int index) { - return getActionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); - } - public java.util.List - getActionBuilderList() { - return getActionFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> - getActionFieldBuilder() { - if (actionBuilder_ == null) { - actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder>( - action_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - action_ = null; - } - return actionBuilder_; - } - - // optional bool atomic = 3; - private boolean atomic_ ; - public boolean hasAtomic() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getAtomic() { - return atomic_; - } - public Builder setAtomic(boolean value) { - bitField0_ |= 0x00000004; - atomic_ = value; - onChanged(); - return this; - } - public Builder clearAtomic() { - bitField0_ = (bitField0_ & ~0x00000004); - atomic_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:MultiRequest) - } - - static { - defaultInstance = new MultiRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiRequest) - } - - public interface MultiResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .ActionResult result = 1; - java.util.List - getResultList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index); - int getResultCount(); - java.util.List - getResultOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( - int index); - } - public static final class MultiResponse extends - com.google.protobuf.GeneratedMessage - implements MultiResponseOrBuilder { - // Use MultiResponse.newBuilder() to construct. 
- private MultiResponse(Builder builder) { - super(builder); - } - private MultiResponse(boolean noInit) {} - - private static final MultiResponse defaultInstance; - public static MultiResponse getDefaultInstance() { - return defaultInstance; - } - - public MultiResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; - } - - // repeated .ActionResult result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private java.util.List result_; - public java.util.List getResultList() { - return result_; - } - public java.util.List - getResultOrBuilderList() { - return result_; - } - public int getResultCount() { - return result_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { - return result_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( - int index) { - return result_.get(index); - } - - private void initFields() { - result_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < result_.size(); i++) { - output.writeMessage(1, result_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < result_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj; - - boolean result = true; - result = result && getResultList() - .equals(other.getResultList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResultCount() > 0) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResultList().hashCode(); - } - hash = (29 * 
hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - resultBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this); - int from_bitField0_ = bitField0_; - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = java.util.Collections.unmodifiableList(result_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; - if (resultBuilder_ == null) { - if (!other.result_.isEmpty()) { - if (result_.isEmpty()) { - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureResultIsMutable(); - result_.addAll(other.result_); - } - onChanged(); - } - } else { - if (!other.result_.isEmpty()) { - if (resultBuilder_.isEmpty()) { - resultBuilder_.dispose(); - resultBuilder_ = null; - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000001); - resultBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getResultFieldBuilder() : null; - } else { - resultBuilder_.addAllMessages(other.result_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResult(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .ActionResult result = 1; - private java.util.List result_ = - java.util.Collections.emptyList(); - private void ensureResultIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new java.util.ArrayList(result_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> resultBuilder_; - - public java.util.List getResultList() { - if (resultBuilder_ == null) { - return java.util.Collections.unmodifiableList(result_); - } else { - return resultBuilder_.getMessageList(); - } - } - public int getResultCount() { - if (resultBuilder_ == null) { - return result_.size(); - } else { - return resultBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { - if (resultBuilder_ == null) { - return result_.get(index); - } else { - return resultBuilder_.getMessage(index); - } - } - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.set(index, value); - onChanged(); - } else { - resultBuilder_.setMessage(index, value); - } - return this; - } - public Builder setResult( - int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.set(index, builderForValue.build()); - onChanged(); - } else { - resultBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - } else { - resultBuilder_.addMessage(value); - } - return this; - } - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(index, value); - onChanged(); - } else { - resultBuilder_.addMessage(index, value); - } - return this; - } - public Builder addResult( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(builderForValue.build()); - onChanged(); - } else { - resultBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(index, builderForValue.build()); - onChanged(); - } else { - resultBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllResult( - java.lang.Iterable values) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - super.addAll(values, result_); - onChanged(); - } else { - resultBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - resultBuilder_.clear(); - } - return this; - } - public Builder removeResult(int index) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.remove(index); - onChanged(); - } else { - resultBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder getResultBuilder( - int index) { - return getResultFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( - int index) { - if (resultBuilder_ == null) { - return result_.get(index); } else { - return resultBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getResultOrBuilderList() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(result_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder() { - return getResultFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder( - int index) { - return getResultFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); - } - public java.util.List 
- getResultBuilderList() { - return getResultFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder>( - result_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MultiResponse) - } - - static { - defaultInstance = new MultiResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiResponse) - } - - public static abstract class ClientService - implements com.google.protobuf.Service { - protected ClientService() {} - - public interface Interface { - public abstract void get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new ClientService() { - @java.lang.Override - public void get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done) { - impl.get(controller, request, done); - } - - @java.lang.Override - public void mutate( - 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done) { - impl.mutate(controller, request, done); - } - - @java.lang.Override - public void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done) { - impl.scan(controller, request, done); - } - - @java.lang.Override - public void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done) { - impl.lockRow(controller, request, done); - } - - @java.lang.Override - public void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done) { - impl.unlockRow(controller, request, done); - } - - @java.lang.Override - public void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done) { - impl.bulkLoadHFile(controller, request, done); - } - - @java.lang.Override - public void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done) { - impl.execCoprocessor(controller, request, done); - } - - @java.lang.Override - public void execService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done) { - impl.execService(controller, request, done); - } - - @java.lang.Override - public void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done) { - impl.multi(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request); - case 1: - return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request); - case 2: - return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request); - case 3: - return impl.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request); - case 4: - return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request); - case 5: - return 
impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request); - case 6: - return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request); - case 7: - return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); - case 8: - return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void get( 
- com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - 
return; - case 5: - this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 7: - this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 8: - this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); - case 7: - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance())); - } - - public void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance())); - } - - public void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance())); - } - - public void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance())); - } - - public void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance())); - } - - public void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); - } - - public void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance())); - } - - public void execService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); - } - - public void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) - throws com.google.protobuf.ServiceException { - return 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Column_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Column_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Get_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Get_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Result_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Result_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Condition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Condition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Mutate_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Mutate_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Mutate_ColumnValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Mutate_ColumnValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Mutate_ColumnValue_QualifierValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MutateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MutateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MutateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MutateResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Scan_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Scan_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ScanRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ScanRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ScanResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ScanResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - 
internal_static_LockRowRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_LockRowRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_LockRowResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_LockRowResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UnlockRowRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UnlockRowRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UnlockRowResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UnlockRowResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BulkLoadHFileRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BulkLoadHFileRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BulkLoadHFileResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BulkLoadHFileResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Exec_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Exec_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ExecCoprocessorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ExecCoprocessorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ExecCoprocessorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ExecCoprocessorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CoprocessorServiceCall_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CoprocessorServiceCall_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CoprocessorServiceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CoprocessorServiceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CoprocessorServiceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CoprocessorServiceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiAction_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiAction_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ActionResult_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_ActionResult_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\014Client.proto\032\013hbase.proto\032\020Comparator." + - "proto\"+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tquali" + - "fier\030\002 \003(\014\"\362\001\n\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006colum" + - "n\030\002 \003(\0132\007.Column\022!\n\tattribute\030\003 \003(\0132\016.Na" + - "meBytesPair\022\016\n\006lockId\030\004 \001(\004\022\027\n\006filter\030\005 " + - "\001(\0132\007.Filter\022\035\n\ttimeRange\030\006 \001(\0132\n.TimeRa" + - "nge\022\026\n\013maxVersions\030\007 \001(\r:\0011\022\031\n\013cacheBloc" + - "ks\030\010 \001(\010:\004true\022\022\n\nstoreLimit\030\t \001(\r\022\023\n\013st" + - "oreOffset\030\n \001(\r\"\037\n\006Result\022\025\n\rkeyValueByt" + - "es\030\001 \003(\014\"r\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020", - ".RegionSpecifier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020c" + - "losestRowBefore\030\003 \001(\010\022\025\n\rexistenceOnly\030\004" + - " \001(\010\"6\n\013GetResponse\022\027\n\006result\030\001 \001(\0132\007.Re" + - "sult\022\016\n\006exists\030\002 \001(\010\"\177\n\tCondition\022\013\n\003row" + - "\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(" + - "\014\022!\n\013compareType\030\004 \002(\0162\014.CompareType\022\037\n\n" + - "comparator\030\005 \002(\0132\013.Comparator\"\306\004\n\006Mutate" + - "\022\013\n\003row\030\001 \002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Muta" + - "te.MutateType\022(\n\013columnValue\030\003 \003(\0132\023.Mut" + - "ate.ColumnValue\022!\n\tattribute\030\004 \003(\0132\016.Nam", - "eBytesPair\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030" + - "\006 \001(\004\022\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeR" + - "ange\030\n \001(\0132\n.TimeRange\032\310\001\n\013ColumnValue\022\016" + - "\n\006family\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\"" + - ".Mutate.ColumnValue.QualifierValue\032m\n\016Qu" + - "alifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value" + - "\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022&\n\ndeleteType\030" + - "\004 \001(\0162\022.Mutate.DeleteType\"<\n\nMutateType\022" + - "\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006D" + - "ELETE\020\003\"U\n\nDeleteType\022\026\n\022DELETE_ONE_VERS", - "ION\020\000\022\034\n\030DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rD" + - "ELETE_FAMILY\020\002\"i\n\rMutateRequest\022 \n\006regio" + - "n\030\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030\002 \002(" + - "\0132\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Conditio" + - "n\"<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132\007.Re" + - "sult\022\021\n\tprocessed\030\002 
\001(\010\"\243\002\n\004Scan\022\027\n\006colu" + - "mn\030\001 \003(\0132\007.Column\022!\n\tattribute\030\002 \003(\0132\016.N" + - "ameBytesPair\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRo" + - "w\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007.Filter\022\035\n\ttime" + - "Range\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersions\030\007", - " \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022\021\n\tba" + - "tchSize\030\t \001(\r\022\025\n\rmaxResultSize\030\n \001(\004\022\022\n\n" + - "storeLimit\030\013 \001(\r\022\023\n\013storeOffset\030\014 \001(\r\"\230\001" + - "\n\013ScanRequest\022 \n\006region\030\001 \001(\0132\020.RegionSp" + - "ecifier\022\023\n\004scan\030\002 \001(\0132\005.Scan\022\021\n\tscannerI" + - "d\030\003 \001(\004\022\024\n\014numberOfRows\030\004 \001(\r\022\024\n\014closeSc" + - "anner\030\005 \001(\010\022\023\n\013nextCallSeq\030\006 \001(\004\"\\\n\014Scan" + - "Response\022\027\n\006result\030\001 \003(\0132\007.Result\022\021\n\tsca" + - "nnerId\030\002 \001(\004\022\023\n\013moreResults\030\003 \001(\010\022\013\n\003ttl" + - "\030\004 \001(\r\"?\n\016LockRowRequest\022 \n\006region\030\001 \002(\013", - "2\020.RegionSpecifier\022\013\n\003row\030\002 \003(\014\".\n\017LockR" + - "owResponse\022\016\n\006lockId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"" + - "D\n\020UnlockRowRequest\022 \n\006region\030\001 \002(\0132\020.Re" + - "gionSpecifier\022\016\n\006lockId\030\002 \002(\004\"\023\n\021UnlockR" + - "owResponse\"\260\001\n\024BulkLoadHFileRequest\022 \n\006r" + - "egion\030\001 \002(\0132\020.RegionSpecifier\0224\n\nfamilyP" + - "ath\030\002 \003(\0132 .BulkLoadHFileRequest.FamilyP" + - "ath\022\024\n\014assignSeqNum\030\003 \001(\010\032*\n\nFamilyPath\022" + - "\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoa" + - "dHFileResponse\022\016\n\006loaded\030\001 \002(\010\"\203\001\n\004Exec\022", - "\013\n\003row\030\001 \002(\014\022\024\n\014protocolName\030\002 \002(\t\022\022\n\nme" + - "thodName\030\003 \002(\t\022!\n\010property\030\004 \003(\0132\017.NameS" + - "tringPair\022!\n\tparameter\030\005 \003(\0132\016.NameBytes" + - "Pair\"O\n\026ExecCoprocessorRequest\022 \n\006region" + - "\030\001 \002(\0132\020.RegionSpecifier\022\023\n\004call\030\002 \002(\0132\005" + - ".Exec\"8\n\027ExecCoprocessorResponse\022\035\n\005valu" + - "e\030\001 \002(\0132\016.NameBytesPair\"_\n\026CoprocessorSe" + - "rviceCall\022\013\n\003row\030\001 \002(\014\022\023\n\013serviceName\030\002 " + - "\002(\t\022\022\n\nmethodName\030\003 \002(\t\022\017\n\007request\030\004 \002(\014" + - "\"d\n\031CoprocessorServiceRequest\022 \n\006region\030", - "\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 \002(\0132\027." 
+ - "CoprocessorServiceCall\"]\n\032CoprocessorSer" + - "viceResponse\022 \n\006region\030\001 \002(\0132\020.RegionSpe" + - "cifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesPair\"N\n" + - "\013MultiAction\022\027\n\006mutate\030\001 \001(\0132\007.Mutate\022\021\n" + - "\003get\030\002 \001(\0132\004.Get\022\023\n\004exec\030\003 \001(\0132\005.Exec\"P\n" + - "\014ActionResult\022\035\n\005value\030\001 \001(\0132\016.NameBytes" + - "Pair\022!\n\texception\030\002 \001(\0132\016.NameBytesPair\"" + - "^\n\014MultiRequest\022 \n\006region\030\001 \002(\0132\020.Region" + - "Specifier\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022", - "\016\n\006atomic\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006resu" + - "lt\030\001 \003(\0132\r.ActionResult2\331\003\n\rClientServic" + - "e\022 \n\003get\022\013.GetRequest\032\014.GetResponse\022)\n\006m" + - "utate\022\016.MutateRequest\032\017.MutateResponse\022#" + - "\n\004scan\022\014.ScanRequest\032\r.ScanResponse\022,\n\007l" + - "ockRow\022\017.LockRowRequest\032\020.LockRowRespons" + - "e\0222\n\tunlockRow\022\021.UnlockRowRequest\032\022.Unlo" + - "ckRowResponse\022>\n\rbulkLoadHFile\022\025.BulkLoa" + - "dHFileRequest\032\026.BulkLoadHFileResponse\022D\n" + - "\017execCoprocessor\022\027.ExecCoprocessorReques", - "t\032\030.ExecCoprocessorResponse\022F\n\013execServi" + - "ce\022\032.CoprocessorServiceRequest\032\033.Coproce" + - "ssorServiceResponse\022&\n\005multi\022\r.MultiRequ" + - "est\032\016.MultiResponseBB\n*org.apache.hadoop" + - ".hbase.protobuf.generatedB\014ClientProtosH" + - "\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_Column_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_Column_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Column_descriptor, - new java.lang.String[] { "Family", "Qualifier", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); - internal_static_Get_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_Get_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Get_descriptor, - new java.lang.String[] { "Row", "Column", "Attribute", "LockId", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); - internal_static_Result_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_Result_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Result_descriptor, - new java.lang.String[] { "KeyValueBytes", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); - internal_static_GetRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_GetRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_GetRequest_descriptor, - new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); - internal_static_GetResponse_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_GetResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetResponse_descriptor, - new java.lang.String[] { "Result", "Exists", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); - internal_static_Condition_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_Condition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Condition_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); - internal_static_Mutate_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_Mutate_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Mutate_descriptor, - new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Attribute", "Timestamp", "LockId", "WriteToWAL", "TimeRange", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder.class); - internal_static_Mutate_ColumnValue_descriptor = - internal_static_Mutate_descriptor.getNestedTypes().get(0); - internal_static_Mutate_ColumnValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Mutate_ColumnValue_descriptor, - new java.lang.String[] { "Family", "QualifierValue", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder.class); - internal_static_Mutate_ColumnValue_QualifierValue_descriptor = - internal_static_Mutate_ColumnValue_descriptor.getNestedTypes().get(0); - internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Mutate_ColumnValue_QualifierValue_descriptor, - new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); - internal_static_MutateRequest_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_MutateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MutateRequest_descriptor, - new java.lang.String[] { "Region", "Mutate", "Condition", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); - internal_static_MutateResponse_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_MutateResponse_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MutateResponse_descriptor, - new java.lang.String[] { "Result", "Processed", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); - internal_static_Scan_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_Scan_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Scan_descriptor, - new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); - internal_static_ScanRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_ScanRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ScanRequest_descriptor, - new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); - internal_static_ScanResponse_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_ScanResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ScanResponse_descriptor, - new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); - internal_static_LockRowRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_LockRowRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_LockRowRequest_descriptor, - new java.lang.String[] { "Region", "Row", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.Builder.class); - internal_static_LockRowResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_LockRowResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_LockRowResponse_descriptor, - new java.lang.String[] { "LockId", "Ttl", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.Builder.class); - internal_static_UnlockRowRequest_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_UnlockRowRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UnlockRowRequest_descriptor, - new java.lang.String[] { "Region", "LockId", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.Builder.class); - internal_static_UnlockRowResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_UnlockRowResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_UnlockRowResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.Builder.class); - internal_static_BulkLoadHFileRequest_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_BulkLoadHFileRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BulkLoadHFileRequest_descriptor, - new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); - internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = - internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); - internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, - new java.lang.String[] { "Family", "Path", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); - internal_static_BulkLoadHFileResponse_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_BulkLoadHFileResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BulkLoadHFileResponse_descriptor, - new java.lang.String[] { "Loaded", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); - internal_static_Exec_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_Exec_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Exec_descriptor, - new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder.class); - internal_static_ExecCoprocessorRequest_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_ExecCoprocessorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ExecCoprocessorRequest_descriptor, - new java.lang.String[] { "Region", "Call", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.Builder.class); - internal_static_ExecCoprocessorResponse_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_ExecCoprocessorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ExecCoprocessorResponse_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.Builder.class); - internal_static_CoprocessorServiceCall_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_CoprocessorServiceCall_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CoprocessorServiceCall_descriptor, - new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); - internal_static_CoprocessorServiceRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_CoprocessorServiceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CoprocessorServiceRequest_descriptor, - new java.lang.String[] { "Region", "Call", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); - internal_static_CoprocessorServiceResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_CoprocessorServiceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CoprocessorServiceResponse_descriptor, - new java.lang.String[] { "Region", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); - internal_static_MultiAction_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_MultiAction_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiAction_descriptor, - new java.lang.String[] { "Mutate", "Get", "Exec", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); - internal_static_ActionResult_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_ActionResult_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ActionResult_descriptor, - new java.lang.String[] { "Value", "Exception", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); - internal_static_MultiRequest_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_MultiRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiRequest_descriptor, - new java.lang.String[] { "Region", "Action", "Atomic", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); - internal_static_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(27); - internal_static_MultiResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiResponse_descriptor, - new java.lang.String[] { "Result", }, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java deleted file mode 100644 index aac3b80..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java +++ /dev/null @@ -1,468 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ClusterId.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class ClusterIdProtos { - private ClusterIdProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ClusterIdOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string clusterId = 1; - boolean hasClusterId(); - String getClusterId(); - } - public static final class ClusterId extends - com.google.protobuf.GeneratedMessage - implements ClusterIdOrBuilder { - // Use ClusterId.newBuilder() to construct. - private ClusterId(Builder builder) { - super(builder); - } - private ClusterId(boolean noInit) {} - - private static final ClusterId defaultInstance; - public static ClusterId getDefaultInstance() { - return defaultInstance; - } - - public ClusterId getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable; - } - - private int bitField0_; - // required string clusterId = 1; - public static final int CLUSTERID_FIELD_NUMBER = 1; - private java.lang.Object clusterId_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getClusterId() { - java.lang.Object ref = clusterId_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - clusterId_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getClusterIdBytes() { - java.lang.Object ref = clusterId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - clusterId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - clusterId_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasClusterId()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getClusterIdBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = 
memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getClusterIdBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) obj; - - boolean result = true; - result = result && (hasClusterId() == other.hasClusterId()); - if (hasClusterId()) { - result = result && getClusterId() - .equals(other.getClusterId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasClusterId()) { - hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; - hash = (53 * hash) + getClusterId().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - clusterId_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId build() { - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.clusterId_ = clusterId_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this; - if (other.hasClusterId()) { - setClusterId(other.getClusterId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasClusterId()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - clusterId_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string clusterId = 1; - private java.lang.Object clusterId_ = ""; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getClusterId() { - java.lang.Object ref = clusterId_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - clusterId_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setClusterId(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - clusterId_ = value; - onChanged(); - return this; - } - public Builder clearClusterId() { - bitField0_ = (bitField0_ & ~0x00000001); - clusterId_ = getDefaultInstance().getClusterId(); - onChanged(); - return this; - } - void setClusterId(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - clusterId_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:ClusterId) - } - - static { - defaultInstance = new ClusterId(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ClusterId) - } - - private static com.google.protobuf.Descriptors.Descriptor 
- internal_static_ClusterId_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ClusterId_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\017ClusterId.proto\"\036\n\tClusterId\022\021\n\tcluste" + - "rId\030\001 \002(\tBB\n*org.apache.hadoop.hbase.pro" + - "tobuf.generatedB\017ClusterIdProtosH\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_ClusterId_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_ClusterId_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ClusterId_descriptor, - new java.lang.String[] { "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java deleted file mode 100644 index a4c8509..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java +++ /dev/null @@ -1,4426 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ClusterStatus.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class ClusterStatusProtos { - private ClusterStatusProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface RegionStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionInfo regionInfo = 1; - boolean hasRegionInfo(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - - // required .RegionState.State state = 2; - boolean hasState(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState(); - - // optional uint64 stamp = 3; - boolean hasStamp(); - long getStamp(); - } - public static final class RegionState extends - com.google.protobuf.GeneratedMessage - implements RegionStateOrBuilder { - // Use RegionState.newBuilder() to construct. 
- private RegionState(Builder builder) { - super(builder); - } - private RegionState(boolean noInit) {} - - private static final RegionState defaultInstance; - public static RegionState getDefaultInstance() { - return defaultInstance; - } - - public RegionState getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable; - } - - public enum State - implements com.google.protobuf.ProtocolMessageEnum { - OFFLINE(0, 0), - PENDING_OPEN(1, 1), - OPENING(2, 2), - OPEN(3, 3), - PENDING_CLOSE(4, 4), - CLOSING(5, 5), - CLOSED(6, 6), - SPLITTING(7, 7), - SPLIT(8, 8), - ; - - public static final int OFFLINE_VALUE = 0; - public static final int PENDING_OPEN_VALUE = 1; - public static final int OPENING_VALUE = 2; - public static final int OPEN_VALUE = 3; - public static final int PENDING_CLOSE_VALUE = 4; - public static final int CLOSING_VALUE = 5; - public static final int CLOSED_VALUE = 6; - public static final int SPLITTING_VALUE = 7; - public static final int SPLIT_VALUE = 8; - - - public final int getNumber() { return value; } - - public static State valueOf(int value) { - switch (value) { - case 0: return OFFLINE; - case 1: return PENDING_OPEN; - case 2: return OPENING; - case 3: return OPEN; - case 4: return PENDING_CLOSE; - case 5: return CLOSING; - case 6: return CLOSED; - case 7: return SPLITTING; - case 8: return SPLIT; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public State findValueByNumber(int number) { - return State.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor().getEnumTypes().get(0); - } - - private static final State[] VALUES = { - OFFLINE, PENDING_OPEN, OPENING, OPEN, PENDING_CLOSE, CLOSING, CLOSED, SPLITTING, SPLIT, - }; - - public static State valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private State(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:RegionState.State) - } - - private int bitField0_; - // required .RegionInfo regionInfo = 1; - public static final int REGIONINFO_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; - public boolean hasRegionInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; - } - - // required .RegionState.State state = 2; - public static final int STATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_; - public boolean hasState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; - } - - // optional uint64 stamp = 3; - public static final int STAMP_FIELD_NUMBER = 3; - private long stamp_; - public boolean hasStamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getStamp() { - return stamp_; - } - - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - stamp_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionInfo()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasState()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegionInfo().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, state_.getNumber()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, stamp_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, state_.getNumber()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, stamp_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) obj; - - boolean result = true; - result = result && (hasRegionInfo() == other.hasRegionInfo()); - if (hasRegionInfo()) { - result = result && getRegionInfo() - 
.equals(other.getRegionInfo()); - } - result = result && (hasState() == other.hasState()); - if (hasState()) { - result = result && - (getState() == other.getState()); - } - result = result && (hasStamp() == other.hasStamp()); - if (hasStamp()) { - result = result && (getStamp() - == other.getStamp()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionInfo()) { - hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; - hash = (53 * hash) + getRegionInfo().hashCode(); - } - if (hasState()) { - hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); - } - if (hasStamp()) { - hash = (37 * hash) + STAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStamp()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } else { - regionInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - bitField0_ = (bitField0_ & ~0x00000002); - stamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState build() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionInfoBuilder_ == null) { - result.regionInfo_ = regionInfo_; - } else { - result.regionInfo_ = regionInfoBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.state_ = state_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.stamp_ = stamp_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) return this; - if (other.hasRegionInfo()) { - mergeRegionInfo(other.getRegionInfo()); - } - if (other.hasState()) { - setState(other.getState()); - } - if (other.hasStamp()) { - setStamp(other.getStamp()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionInfo()) { - - return false; - } - if (!hasState()) { - - return false; - } - if (!getRegionInfo().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegionInfo()) { - subBuilder.mergeFrom(getRegionInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionInfo(subBuilder.buildPartial()); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - state_ = value; - } - break; 
- } - case 24: { - bitField0_ |= 0x00000004; - stamp_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionInfo regionInfo = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; - public boolean hasRegionInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - if (regionInfoBuilder_ == null) { - return regionInfo_; - } else { - return regionInfoBuilder_.getMessage(); - } - } - public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - regionInfo_ = value; - onChanged(); - } else { - regionInfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegionInfo( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - regionInfo_ = builderForValue.build(); - onChanged(); - } else { - regionInfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { - regionInfo_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); - } else { - regionInfo_ = value; - } - onChanged(); - } else { - regionInfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegionInfo() { - if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - onChanged(); - } else { - regionInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionInfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - if (regionInfoBuilder_ != null) { - return regionInfoBuilder_.getMessageOrBuilder(); - } else { - return regionInfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> - getRegionInfoFieldBuilder() { - if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, - getParentForChildren(), - isClean()); - regionInfo_ = null; - } - return regionInfoBuilder_; - } - - // required .RegionState.State state = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - public boolean hasState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; - } - public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - state_ = value; - onChanged(); - return this; - } - public Builder clearState() { - bitField0_ = (bitField0_ & ~0x00000002); - state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - onChanged(); - return this; - } - - // optional uint64 stamp = 3; - private long stamp_ ; - public boolean hasStamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getStamp() { - return stamp_; - } - public Builder setStamp(long value) { - bitField0_ |= 0x00000004; - stamp_ = value; - onChanged(); - return this; - } - public Builder clearStamp() { - bitField0_ = (bitField0_ & ~0x00000004); - stamp_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RegionState) - } - - static { - defaultInstance = new RegionState(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionState) - } - - public interface RegionInTransitionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier spec = 1; - boolean hasSpec(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder(); - - // required .RegionState regionState = 2; - boolean hasRegionState(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder(); - } - public static final class RegionInTransition extends - com.google.protobuf.GeneratedMessage - implements RegionInTransitionOrBuilder { - // Use RegionInTransition.newBuilder() to construct. 
- private RegionInTransition(Builder builder) { - super(builder); - } - private RegionInTransition(boolean noInit) {} - - private static final RegionInTransition defaultInstance; - public static RegionInTransition getDefaultInstance() { - return defaultInstance; - } - - public RegionInTransition getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier spec = 1; - public static final int SPEC_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_; - public boolean hasSpec() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { - return spec_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { - return spec_; - } - - // required .RegionState regionState = 2; - public static final int REGIONSTATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_; - public boolean hasRegionState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { - return regionState_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { - return regionState_; - } - - private void initFields() { - spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasSpec()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRegionState()) { - memoizedIsInitialized = 0; - return false; - } - if (!getSpec().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegionState().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, spec_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, regionState_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, spec_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, regionState_); - } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) obj; - - boolean result = true; - result = result && (hasSpec() == other.hasSpec()); - if (hasSpec()) { - result = result && getSpec() - .equals(other.getSpec()); - } - result = result && (hasRegionState() == other.hasRegionState()); - if (hasRegionState()) { - result = result && getRegionState() - .equals(other.getRegionState()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSpec()) { - hash = (37 * hash) + SPEC_FIELD_NUMBER; - hash = (53 * hash) + getSpec().hashCode(); - } - if (hasRegionState()) { - hash = (37 * hash) + REGIONSTATE_FIELD_NUMBER; - hash = (53 * hash) + getRegionState().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getSpecFieldBuilder(); - getRegionStateFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (specBuilder_ == null) { - spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - specBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (regionStateBuilder_ == null) { - regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - } else { - regionStateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition build() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (specBuilder_ == null) { - result.spec_ = spec_; - } else { - result.spec_ = specBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (regionStateBuilder_ == null) { - result.regionState_ = regionState_; - } else { - result.regionState_ = regionStateBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()) return this; - if (other.hasSpec()) { - mergeSpec(other.getSpec()); - } - if (other.hasRegionState()) { - mergeRegionState(other.getRegionState()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasSpec()) { - - return false; - } - if (!hasRegionState()) { - - return false; - } - if (!getSpec().isInitialized()) { - - return false; - } - if (!getRegionState().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - 
extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasSpec()) { - subBuilder.mergeFrom(getSpec()); - } - input.readMessage(subBuilder, extensionRegistry); - setSpec(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(); - if (hasRegionState()) { - subBuilder.mergeFrom(getRegionState()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionState(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier spec = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_; - public boolean hasSpec() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { - if (specBuilder_ == null) { - return spec_; - } else { - return specBuilder_.getMessage(); - } - } - public Builder setSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (specBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - spec_ = value; - onChanged(); - } else { - specBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setSpec( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (specBuilder_ == null) { - spec_ = builderForValue.build(); - onChanged(); - } else { - specBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (specBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - spec_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - spec_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(spec_).mergeFrom(value).buildPartial(); - } else { - spec_ = value; - } - onChanged(); - } else { - specBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearSpec() { - if (specBuilder_ == null) { - spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - specBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getSpecBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getSpecFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { - if (specBuilder_ != null) { - return 
specBuilder_.getMessageOrBuilder(); - } else { - return spec_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getSpecFieldBuilder() { - if (specBuilder_ == null) { - specBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - spec_, - getParentForChildren(), - isClean()); - spec_ = null; - } - return specBuilder_; - } - - // required .RegionState regionState = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_; - public boolean hasRegionState() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { - if (regionStateBuilder_ == null) { - return regionState_; - } else { - return regionStateBuilder_.getMessage(); - } - } - public Builder setRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) { - if (regionStateBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - regionState_ = value; - onChanged(); - } else { - regionStateBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setRegionState( - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder builderForValue) { - if (regionStateBuilder_ == null) { - regionState_ = builderForValue.build(); - onChanged(); - } else { - regionStateBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) { - if (regionStateBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - regionState_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) { - regionState_ = - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(regionState_).mergeFrom(value).buildPartial(); - } else { - regionState_ = value; - } - onChanged(); - } else { - regionStateBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearRegionState() { - if (regionStateBuilder_ == null) { - regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - onChanged(); - } else { - regionStateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder getRegionStateBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getRegionStateFieldBuilder().getBuilder(); - } - 
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { - if (regionStateBuilder_ != null) { - return regionStateBuilder_.getMessageOrBuilder(); - } else { - return regionState_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> - getRegionStateFieldBuilder() { - if (regionStateBuilder_ == null) { - regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>( - regionState_, - getParentForChildren(), - isClean()); - regionState_ = null; - } - return regionStateBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RegionInTransition) - } - - static { - defaultInstance = new RegionInTransition(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionInTransition) - } - - public interface LiveServerInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName server = 1; - boolean hasServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - - // required .ServerLoad serverLoad = 2; - boolean hasServerLoad(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder(); - } - public static final class LiveServerInfo extends - com.google.protobuf.GeneratedMessage - implements LiveServerInfoOrBuilder { - // Use LiveServerInfo.newBuilder() to construct. 
- private LiveServerInfo(Builder builder) { - super(builder); - } - private LiveServerInfo(boolean noInit) {} - - private static final LiveServerInfo defaultInstance; - public static LiveServerInfo getDefaultInstance() { - return defaultInstance; - } - - public LiveServerInfo getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName server = 1; - public static final int SERVER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; - } - - // required .ServerLoad serverLoad = 2; - public static final int SERVERLOAD_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_; - public boolean hasServerLoad() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() { - return serverLoad_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { - return serverLoad_; - } - - private void initFields() { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServer()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasServerLoad()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServer().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServerLoad().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, serverLoad_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, serverLoad_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static 
final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) obj; - - boolean result = true; - result = result && (hasServer() == other.hasServer()); - if (hasServer()) { - result = result && getServer() - .equals(other.getServer()); - } - result = result && (hasServerLoad() == other.hasServerLoad()); - if (hasServerLoad()) { - result = result && getServerLoad() - .equals(other.getServerLoad()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServer()) { - hash = (37 * hash) + SERVER_FIELD_NUMBER; - hash = (53 * hash) + getServer().hashCode(); - } - if (hasServerLoad()) { - hash = (37 * hash) + SERVERLOAD_FIELD_NUMBER; - hash = (53 * hash) + getServerLoad().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerFieldBuilder(); - getServerLoadFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (serverLoadBuilder_ == null) { - serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - } else { - serverLoadBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstanceForType() { - 
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo build() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverBuilder_ == null) { - result.server_ = server_; - } else { - result.server_ = serverBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (serverLoadBuilder_ == null) { - result.serverLoad_ = serverLoad_; - } else { - result.serverLoad_ = serverLoadBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()) return this; - if (other.hasServer()) { - mergeServer(other.getServer()); - } - if (other.hasServerLoad()) { - mergeServerLoad(other.getServerLoad()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServer()) { - - return false; - } - if (!hasServerLoad()) { - - return false; - } - if (!getServer().isInitialized()) { - - return false; - } - if (!getServerLoad().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(); - if (hasServerLoad()) { - subBuilder.mergeFrom(getServerLoad()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerLoad(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName server = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - if (serverBuilder_ == null) { - return server_; - } else { - return serverBuilder_.getMessage(); - } - } - public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - server_ = value; - onChanged(); - } else { - serverBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServer( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverBuilder_ == null) { - server_ = builderForValue.build(); - onChanged(); - } else { - serverBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - server_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); - } else { - server_ = value; - } - onChanged(); - } else { - serverBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServer() { - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - if (serverBuilder_ != null) { - return serverBuilder_.getMessageOrBuilder(); - } else { - return server_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerFieldBuilder() { - if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, - getParentForChildren(), - isClean()); - server_ = null; - } - return serverBuilder_; - } - - // required .ServerLoad serverLoad = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> serverLoadBuilder_; - public boolean hasServerLoad() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() { - if (serverLoadBuilder_ == null) { - return serverLoad_; - } else { - return serverLoadBuilder_.getMessage(); - } - } - public Builder setServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { - if (serverLoadBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - serverLoad_ = value; - onChanged(); - } else { - serverLoadBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setServerLoad( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) { - if (serverLoadBuilder_ == null) { - serverLoad_ = builderForValue.build(); - onChanged(); - } else { - serverLoadBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { - if (serverLoadBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - serverLoad_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) { - serverLoad_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(serverLoad_).mergeFrom(value).buildPartial(); - } else { - serverLoad_ = value; - } - onChanged(); - } else { - serverLoadBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearServerLoad() { - if (serverLoadBuilder_ == null) { - serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - onChanged(); - } else { - serverLoadBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getServerLoadBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getServerLoadFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { - if (serverLoadBuilder_ != null) { - return serverLoadBuilder_.getMessageOrBuilder(); - } else { - return serverLoad_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> - getServerLoadFieldBuilder() { - if (serverLoadBuilder_ == null) { - serverLoadBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder>( - serverLoad_, - getParentForChildren(), - isClean()); - serverLoad_ = null; - } - return serverLoadBuilder_; - } - - // @@protoc_insertion_point(builder_scope:LiveServerInfo) - } - - static { - defaultInstance = new LiveServerInfo(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:LiveServerInfo) - } - - public interface ClusterStatusOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .HBaseVersionFileContent hbaseVersion = 1; - boolean hasHbaseVersion(); - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion(); - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder(); - - // repeated .LiveServerInfo liveServers = 2; - java.util.List - getLiveServersList(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index); - int getLiveServersCount(); - java.util.List - getLiveServersOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( - int index); - - // repeated .ServerName deadServers = 3; - java.util.List - getDeadServersList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index); - int getDeadServersCount(); - java.util.List - getDeadServersOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( - int index); - - // repeated .RegionInTransition regionsInTransition = 4; - java.util.List - getRegionsInTransitionList(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index); - int getRegionsInTransitionCount(); - java.util.List - getRegionsInTransitionOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( - int index); - - // optional .ClusterId clusterId = 5; - boolean hasClusterId(); - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId(); - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder(); - - // repeated .Coprocessor masterCoprocessors = 6; - java.util.List - getMasterCoprocessorsList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index); - int getMasterCoprocessorsCount(); - java.util.List - getMasterCoprocessorsOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( - int index); - - // optional .ServerName master = 7; - boolean hasMaster(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); - - // repeated .ServerName backupMasters = 8; - java.util.List - getBackupMastersList(); - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index); - int getBackupMastersCount(); - java.util.List - getBackupMastersOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( - int index); - - // optional bool balancerOn = 9; - boolean hasBalancerOn(); - boolean getBalancerOn(); - } - public static final class ClusterStatus extends - com.google.protobuf.GeneratedMessage - implements ClusterStatusOrBuilder { - // Use ClusterStatus.newBuilder() to construct. - private ClusterStatus(Builder builder) { - super(builder); - } - private ClusterStatus(boolean noInit) {} - - private static final ClusterStatus defaultInstance; - public static ClusterStatus getDefaultInstance() { - return defaultInstance; - } - - public ClusterStatus getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable; - } - - private int bitField0_; - // optional .HBaseVersionFileContent hbaseVersion = 1; - public static final int HBASEVERSION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_; - public boolean hasHbaseVersion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { - return hbaseVersion_; - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { - return hbaseVersion_; - } - - // repeated .LiveServerInfo liveServers = 2; - public static final int LIVESERVERS_FIELD_NUMBER = 2; - private java.util.List liveServers_; - public java.util.List getLiveServersList() { - return liveServers_; - } - public java.util.List - getLiveServersOrBuilderList() { - return liveServers_; - } - public int getLiveServersCount() { - return liveServers_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) { - return liveServers_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( - int index) { - return liveServers_.get(index); - } - - // repeated .ServerName deadServers = 3; - public static final int DEADSERVERS_FIELD_NUMBER = 3; - private java.util.List deadServers_; - public java.util.List getDeadServersList() { - return deadServers_; - } - public java.util.List - getDeadServersOrBuilderList() { - return deadServers_; - } - public int getDeadServersCount() { - return deadServers_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) { - return deadServers_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( - int index) { - return deadServers_.get(index); - } - - // repeated .RegionInTransition regionsInTransition = 4; - public static final int REGIONSINTRANSITION_FIELD_NUMBER = 4; - private java.util.List regionsInTransition_; - public java.util.List 
getRegionsInTransitionList() { - return regionsInTransition_; - } - public java.util.List - getRegionsInTransitionOrBuilderList() { - return regionsInTransition_; - } - public int getRegionsInTransitionCount() { - return regionsInTransition_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) { - return regionsInTransition_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( - int index) { - return regionsInTransition_.get(index); - } - - // optional .ClusterId clusterId = 5; - public static final int CLUSTERID_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { - return clusterId_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { - return clusterId_; - } - - // repeated .Coprocessor masterCoprocessors = 6; - public static final int MASTERCOPROCESSORS_FIELD_NUMBER = 6; - private java.util.List masterCoprocessors_; - public java.util.List getMasterCoprocessorsList() { - return masterCoprocessors_; - } - public java.util.List - getMasterCoprocessorsOrBuilderList() { - return masterCoprocessors_; - } - public int getMasterCoprocessorsCount() { - return masterCoprocessors_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) { - return masterCoprocessors_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( - int index) { - return masterCoprocessors_.get(index); - } - - // optional .ServerName master = 7; - public static final int MASTER_FIELD_NUMBER = 7; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_; - public boolean hasMaster() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { - return master_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { - return master_; - } - - // repeated .ServerName backupMasters = 8; - public static final int BACKUPMASTERS_FIELD_NUMBER = 8; - private java.util.List backupMasters_; - public java.util.List getBackupMastersList() { - return backupMasters_; - } - public java.util.List - getBackupMastersOrBuilderList() { - return backupMasters_; - } - public int getBackupMastersCount() { - return backupMasters_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) { - return backupMasters_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( - int index) { - return backupMasters_.get(index); - } - - // optional bool balancerOn = 9; - public static final int BALANCERON_FIELD_NUMBER = 9; - private boolean balancerOn_; - public boolean hasBalancerOn() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getBalancerOn() { - return balancerOn_; - } - - private void initFields() { - hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - 
liveServers_ = java.util.Collections.emptyList(); - deadServers_ = java.util.Collections.emptyList(); - regionsInTransition_ = java.util.Collections.emptyList(); - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - masterCoprocessors_ = java.util.Collections.emptyList(); - master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - backupMasters_ = java.util.Collections.emptyList(); - balancerOn_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasHbaseVersion()) { - if (!getHbaseVersion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getLiveServersCount(); i++) { - if (!getLiveServers(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getDeadServersCount(); i++) { - if (!getDeadServers(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getRegionsInTransitionCount(); i++) { - if (!getRegionsInTransition(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getMasterCoprocessorsCount(); i++) { - if (!getMasterCoprocessors(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasMaster()) { - if (!getMaster().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getBackupMastersCount(); i++) { - if (!getBackupMasters(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, hbaseVersion_); - } - for (int i = 0; i < liveServers_.size(); i++) { - output.writeMessage(2, liveServers_.get(i)); - } - for (int i = 0; i < deadServers_.size(); i++) { - output.writeMessage(3, deadServers_.get(i)); - } - for (int i = 0; i < regionsInTransition_.size(); i++) { - output.writeMessage(4, regionsInTransition_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(5, clusterId_); - } - for (int i = 0; i < masterCoprocessors_.size(); i++) { - output.writeMessage(6, masterCoprocessors_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(7, master_); - } - for (int i = 0; i < backupMasters_.size(); i++) { - output.writeMessage(8, backupMasters_.get(i)); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBool(9, balancerOn_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, hbaseVersion_); - } - for (int i = 0; i < liveServers_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, liveServers_.get(i)); - } - for (int i = 0; i < deadServers_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, deadServers_.get(i)); - } 
- for (int i = 0; i < regionsInTransition_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, regionsInTransition_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, clusterId_); - } - for (int i = 0; i < masterCoprocessors_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, masterCoprocessors_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(7, master_); - } - for (int i = 0; i < backupMasters_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(8, backupMasters_.get(i)); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(9, balancerOn_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) obj; - - boolean result = true; - result = result && (hasHbaseVersion() == other.hasHbaseVersion()); - if (hasHbaseVersion()) { - result = result && getHbaseVersion() - .equals(other.getHbaseVersion()); - } - result = result && getLiveServersList() - .equals(other.getLiveServersList()); - result = result && getDeadServersList() - .equals(other.getDeadServersList()); - result = result && getRegionsInTransitionList() - .equals(other.getRegionsInTransitionList()); - result = result && (hasClusterId() == other.hasClusterId()); - if (hasClusterId()) { - result = result && getClusterId() - .equals(other.getClusterId()); - } - result = result && getMasterCoprocessorsList() - .equals(other.getMasterCoprocessorsList()); - result = result && (hasMaster() == other.hasMaster()); - if (hasMaster()) { - result = result && getMaster() - .equals(other.getMaster()); - } - result = result && getBackupMastersList() - .equals(other.getBackupMastersList()); - result = result && (hasBalancerOn() == other.hasBalancerOn()); - if (hasBalancerOn()) { - result = result && (getBalancerOn() - == other.getBalancerOn()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasHbaseVersion()) { - hash = (37 * hash) + HBASEVERSION_FIELD_NUMBER; - hash = (53 * hash) + getHbaseVersion().hashCode(); - } - if (getLiveServersCount() > 0) { - hash = (37 * hash) + LIVESERVERS_FIELD_NUMBER; - hash = (53 * hash) + getLiveServersList().hashCode(); - } - if (getDeadServersCount() > 0) { - hash = (37 * hash) + DEADSERVERS_FIELD_NUMBER; - hash = (53 * hash) + getDeadServersList().hashCode(); - } - if (getRegionsInTransitionCount() > 0) { - hash = (37 * hash) + REGIONSINTRANSITION_FIELD_NUMBER; - hash = (53 * hash) + getRegionsInTransitionList().hashCode(); - } - if 
(hasClusterId()) { - hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; - hash = (53 * hash) + getClusterId().hashCode(); - } - if (getMasterCoprocessorsCount() > 0) { - hash = (37 * hash) + MASTERCOPROCESSORS_FIELD_NUMBER; - hash = (53 * hash) + getMasterCoprocessorsList().hashCode(); - } - if (hasMaster()) { - hash = (37 * hash) + MASTER_FIELD_NUMBER; - hash = (53 * hash) + getMaster().hashCode(); - } - if (getBackupMastersCount() > 0) { - hash = (37 * hash) + BACKUPMASTERS_FIELD_NUMBER; - hash = (53 * hash) + getBackupMastersList().hashCode(); - } - if (hasBalancerOn()) { - hash = (37 * hash) + BALANCERON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBalancerOn()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getHbaseVersionFieldBuilder(); - getLiveServersFieldBuilder(); - getDeadServersFieldBuilder(); - getRegionsInTransitionFieldBuilder(); - getClusterIdFieldBuilder(); - getMasterCoprocessorsFieldBuilder(); - getMasterFieldBuilder(); - getBackupMastersFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (hbaseVersionBuilder_ == null) { - hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - } else { - hbaseVersionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (liveServersBuilder_ == null) { - liveServers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - liveServersBuilder_.clear(); - } - if (deadServersBuilder_ == null) { - deadServers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - deadServersBuilder_.clear(); - } - if (regionsInTransitionBuilder_ == null) { - regionsInTransition_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - } else { - regionsInTransitionBuilder_.clear(); - } - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - if (masterCoprocessorsBuilder_ == null) { - masterCoprocessors_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - } else { - masterCoprocessorsBuilder_.clear(); - } - if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - masterBuilder_.clear(); - } - bitField0_ = 
(bitField0_ & ~0x00000040); - if (backupMastersBuilder_ == null) { - backupMasters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000080); - } else { - backupMastersBuilder_.clear(); - } - balancerOn_ = false; - bitField0_ = (bitField0_ & ~0x00000100); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus build() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (hbaseVersionBuilder_ == null) { - result.hbaseVersion_ = hbaseVersion_; - } else { - result.hbaseVersion_ = hbaseVersionBuilder_.build(); - } - if (liveServersBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - liveServers_ = java.util.Collections.unmodifiableList(liveServers_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.liveServers_ = liveServers_; - } else { - result.liveServers_ = liveServersBuilder_.build(); - } - if (deadServersBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - deadServers_ = java.util.Collections.unmodifiableList(deadServers_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.deadServers_ = deadServers_; - } else { - result.deadServers_ = deadServersBuilder_.build(); - } - if (regionsInTransitionBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008)) { - regionsInTransition_ = java.util.Collections.unmodifiableList(regionsInTransition_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.regionsInTransition_ = regionsInTransition_; - } else { - result.regionsInTransition_ = regionsInTransitionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000002; - } - if (clusterIdBuilder_ == null) { - result.clusterId_ = clusterId_; - } else { - result.clusterId_ = clusterIdBuilder_.build(); - } - if (masterCoprocessorsBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020)) { - masterCoprocessors_ = java.util.Collections.unmodifiableList(masterCoprocessors_); - bitField0_ = (bitField0_ & ~0x00000020); - } - 
result.masterCoprocessors_ = masterCoprocessors_; - } else { - result.masterCoprocessors_ = masterCoprocessorsBuilder_.build(); - } - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000004; - } - if (masterBuilder_ == null) { - result.master_ = master_; - } else { - result.master_ = masterBuilder_.build(); - } - if (backupMastersBuilder_ == null) { - if (((bitField0_ & 0x00000080) == 0x00000080)) { - backupMasters_ = java.util.Collections.unmodifiableList(backupMasters_); - bitField0_ = (bitField0_ & ~0x00000080); - } - result.backupMasters_ = backupMasters_; - } else { - result.backupMasters_ = backupMastersBuilder_.build(); - } - if (((from_bitField0_ & 0x00000100) == 0x00000100)) { - to_bitField0_ |= 0x00000008; - } - result.balancerOn_ = balancerOn_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) return this; - if (other.hasHbaseVersion()) { - mergeHbaseVersion(other.getHbaseVersion()); - } - if (liveServersBuilder_ == null) { - if (!other.liveServers_.isEmpty()) { - if (liveServers_.isEmpty()) { - liveServers_ = other.liveServers_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureLiveServersIsMutable(); - liveServers_.addAll(other.liveServers_); - } - onChanged(); - } - } else { - if (!other.liveServers_.isEmpty()) { - if (liveServersBuilder_.isEmpty()) { - liveServersBuilder_.dispose(); - liveServersBuilder_ = null; - liveServers_ = other.liveServers_; - bitField0_ = (bitField0_ & ~0x00000002); - liveServersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getLiveServersFieldBuilder() : null; - } else { - liveServersBuilder_.addAllMessages(other.liveServers_); - } - } - } - if (deadServersBuilder_ == null) { - if (!other.deadServers_.isEmpty()) { - if (deadServers_.isEmpty()) { - deadServers_ = other.deadServers_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureDeadServersIsMutable(); - deadServers_.addAll(other.deadServers_); - } - onChanged(); - } - } else { - if (!other.deadServers_.isEmpty()) { - if (deadServersBuilder_.isEmpty()) { - deadServersBuilder_.dispose(); - deadServersBuilder_ = null; - deadServers_ = other.deadServers_; - bitField0_ = (bitField0_ & ~0x00000004); - deadServersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getDeadServersFieldBuilder() : null; - } else { - deadServersBuilder_.addAllMessages(other.deadServers_); - } - } - } - if (regionsInTransitionBuilder_ == null) { - if (!other.regionsInTransition_.isEmpty()) { - if (regionsInTransition_.isEmpty()) { - regionsInTransition_ = other.regionsInTransition_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.addAll(other.regionsInTransition_); - } - onChanged(); - } - } else { - if (!other.regionsInTransition_.isEmpty()) { - if (regionsInTransitionBuilder_.isEmpty()) { - regionsInTransitionBuilder_.dispose(); - regionsInTransitionBuilder_ = null; - regionsInTransition_ = other.regionsInTransition_; - bitField0_ = (bitField0_ & ~0x00000008); - regionsInTransitionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getRegionsInTransitionFieldBuilder() : null; - } else { - regionsInTransitionBuilder_.addAllMessages(other.regionsInTransition_); - } - } - } - if (other.hasClusterId()) { - mergeClusterId(other.getClusterId()); - } - if (masterCoprocessorsBuilder_ == null) { - if (!other.masterCoprocessors_.isEmpty()) { - if (masterCoprocessors_.isEmpty()) { - masterCoprocessors_ = other.masterCoprocessors_; - bitField0_ = (bitField0_ & ~0x00000020); - } else { - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.addAll(other.masterCoprocessors_); - } - onChanged(); - } - } else { - if (!other.masterCoprocessors_.isEmpty()) { - if (masterCoprocessorsBuilder_.isEmpty()) { - masterCoprocessorsBuilder_.dispose(); - masterCoprocessorsBuilder_ = null; - masterCoprocessors_ = other.masterCoprocessors_; - bitField0_ = (bitField0_ & ~0x00000020); - masterCoprocessorsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getMasterCoprocessorsFieldBuilder() : null; - } else { - masterCoprocessorsBuilder_.addAllMessages(other.masterCoprocessors_); - } - } - } - if (other.hasMaster()) { - mergeMaster(other.getMaster()); - } - if (backupMastersBuilder_ == null) { - if (!other.backupMasters_.isEmpty()) { - if (backupMasters_.isEmpty()) { - backupMasters_ = other.backupMasters_; - bitField0_ = (bitField0_ & ~0x00000080); - } else { - ensureBackupMastersIsMutable(); - backupMasters_.addAll(other.backupMasters_); - } - onChanged(); - } - } else { - if (!other.backupMasters_.isEmpty()) { - if (backupMastersBuilder_.isEmpty()) { - backupMastersBuilder_.dispose(); - backupMastersBuilder_ = null; - backupMasters_ = other.backupMasters_; - bitField0_ = (bitField0_ & ~0x00000080); - backupMastersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getBackupMastersFieldBuilder() : null; - } else { - backupMastersBuilder_.addAllMessages(other.backupMasters_); - } - } - } - if (other.hasBalancerOn()) { - setBalancerOn(other.getBalancerOn()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasHbaseVersion()) { - if (!getHbaseVersion().isInitialized()) { - - return false; - } - } - for (int i = 0; i < getLiveServersCount(); i++) { - if (!getLiveServers(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getDeadServersCount(); i++) { - if (!getDeadServers(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getRegionsInTransitionCount(); i++) { - if (!getRegionsInTransition(i).isInitialized()) { - - return false; - } - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - - return false; - } - } - for (int i = 0; i < getMasterCoprocessorsCount(); i++) { - if (!getMasterCoprocessors(i).isInitialized()) { - - return false; - } - } - if (hasMaster()) { - if (!getMaster().isInitialized()) { - - return false; - } - } - for (int i = 0; i < getBackupMastersCount(); i++) { - if (!getBackupMasters(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(); - if (hasHbaseVersion()) { - subBuilder.mergeFrom(getHbaseVersion()); - } - input.readMessage(subBuilder, extensionRegistry); - setHbaseVersion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addLiveServers(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addDeadServers(subBuilder.buildPartial()); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRegionsInTransition(subBuilder.buildPartial()); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(); - if (hasClusterId()) { - subBuilder.mergeFrom(getClusterId()); - } - input.readMessage(subBuilder, extensionRegistry); - 
setClusterId(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMasterCoprocessors(subBuilder.buildPartial()); - break; - } - case 58: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasMaster()) { - subBuilder.mergeFrom(getMaster()); - } - input.readMessage(subBuilder, extensionRegistry); - setMaster(subBuilder.buildPartial()); - break; - } - case 66: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addBackupMasters(subBuilder.buildPartial()); - break; - } - case 72: { - bitField0_ |= 0x00000100; - balancerOn_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional .HBaseVersionFileContent hbaseVersion = 1; - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> hbaseVersionBuilder_; - public boolean hasHbaseVersion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { - if (hbaseVersionBuilder_ == null) { - return hbaseVersion_; - } else { - return hbaseVersionBuilder_.getMessage(); - } - } - public Builder setHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) { - if (hbaseVersionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - hbaseVersion_ = value; - onChanged(); - } else { - hbaseVersionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setHbaseVersion( - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder builderForValue) { - if (hbaseVersionBuilder_ == null) { - hbaseVersion_ = builderForValue.build(); - onChanged(); - } else { - hbaseVersionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) { - if (hbaseVersionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - hbaseVersion_ != org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) { - hbaseVersion_ = - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(hbaseVersion_).mergeFrom(value).buildPartial(); - } else { - hbaseVersion_ = value; - } - onChanged(); - } else { - hbaseVersionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearHbaseVersion() { - if (hbaseVersionBuilder_ == null) { - hbaseVersion_ = 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - onChanged(); - } else { - hbaseVersionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder getHbaseVersionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getHbaseVersionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { - if (hbaseVersionBuilder_ != null) { - return hbaseVersionBuilder_.getMessageOrBuilder(); - } else { - return hbaseVersion_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> - getHbaseVersionFieldBuilder() { - if (hbaseVersionBuilder_ == null) { - hbaseVersionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder>( - hbaseVersion_, - getParentForChildren(), - isClean()); - hbaseVersion_ = null; - } - return hbaseVersionBuilder_; - } - - // repeated .LiveServerInfo liveServers = 2; - private java.util.List liveServers_ = - java.util.Collections.emptyList(); - private void ensureLiveServersIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - liveServers_ = new java.util.ArrayList(liveServers_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> liveServersBuilder_; - - public java.util.List getLiveServersList() { - if (liveServersBuilder_ == null) { - return java.util.Collections.unmodifiableList(liveServers_); - } else { - return liveServersBuilder_.getMessageList(); - } - } - public int getLiveServersCount() { - if (liveServersBuilder_ == null) { - return liveServers_.size(); - } else { - return liveServersBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) { - if (liveServersBuilder_ == null) { - return liveServers_.get(index); - } else { - return liveServersBuilder_.getMessage(index); - } - } - public Builder setLiveServers( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { - if (liveServersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureLiveServersIsMutable(); - liveServers_.set(index, value); - onChanged(); - } else { - liveServersBuilder_.setMessage(index, value); - } - return this; - } - public Builder setLiveServers( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { - if (liveServersBuilder_ == null) { - ensureLiveServersIsMutable(); - liveServers_.set(index, builderForValue.build()); - onChanged(); - } else { - liveServersBuilder_.setMessage(index, builderForValue.build()); - } 
- return this; - } - public Builder addLiveServers(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { - if (liveServersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureLiveServersIsMutable(); - liveServers_.add(value); - onChanged(); - } else { - liveServersBuilder_.addMessage(value); - } - return this; - } - public Builder addLiveServers( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) { - if (liveServersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureLiveServersIsMutable(); - liveServers_.add(index, value); - onChanged(); - } else { - liveServersBuilder_.addMessage(index, value); - } - return this; - } - public Builder addLiveServers( - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { - if (liveServersBuilder_ == null) { - ensureLiveServersIsMutable(); - liveServers_.add(builderForValue.build()); - onChanged(); - } else { - liveServersBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addLiveServers( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) { - if (liveServersBuilder_ == null) { - ensureLiveServersIsMutable(); - liveServers_.add(index, builderForValue.build()); - onChanged(); - } else { - liveServersBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllLiveServers( - java.lang.Iterable values) { - if (liveServersBuilder_ == null) { - ensureLiveServersIsMutable(); - super.addAll(values, liveServers_); - onChanged(); - } else { - liveServersBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearLiveServers() { - if (liveServersBuilder_ == null) { - liveServers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - liveServersBuilder_.clear(); - } - return this; - } - public Builder removeLiveServers(int index) { - if (liveServersBuilder_ == null) { - ensureLiveServersIsMutable(); - liveServers_.remove(index); - onChanged(); - } else { - liveServersBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder getLiveServersBuilder( - int index) { - return getLiveServersFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( - int index) { - if (liveServersBuilder_ == null) { - return liveServers_.get(index); } else { - return liveServersBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getLiveServersOrBuilderList() { - if (liveServersBuilder_ != null) { - return liveServersBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(liveServers_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder() { - return getLiveServersFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder( - int index) { - return getLiveServersFieldBuilder().addBuilder( - index, 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()); - } - public java.util.List - getLiveServersBuilderList() { - return getLiveServersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> - getLiveServersFieldBuilder() { - if (liveServersBuilder_ == null) { - liveServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder>( - liveServers_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - liveServers_ = null; - } - return liveServersBuilder_; - } - - // repeated .ServerName deadServers = 3; - private java.util.List deadServers_ = - java.util.Collections.emptyList(); - private void ensureDeadServersIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - deadServers_ = new java.util.ArrayList(deadServers_); - bitField0_ |= 0x00000004; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> deadServersBuilder_; - - public java.util.List getDeadServersList() { - if (deadServersBuilder_ == null) { - return java.util.Collections.unmodifiableList(deadServers_); - } else { - return deadServersBuilder_.getMessageList(); - } - } - public int getDeadServersCount() { - if (deadServersBuilder_ == null) { - return deadServers_.size(); - } else { - return deadServersBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) { - if (deadServersBuilder_ == null) { - return deadServers_.get(index); - } else { - return deadServersBuilder_.getMessage(index); - } - } - public Builder setDeadServers( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (deadServersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureDeadServersIsMutable(); - deadServers_.set(index, value); - onChanged(); - } else { - deadServersBuilder_.setMessage(index, value); - } - return this; - } - public Builder setDeadServers( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (deadServersBuilder_ == null) { - ensureDeadServersIsMutable(); - deadServers_.set(index, builderForValue.build()); - onChanged(); - } else { - deadServersBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addDeadServers(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (deadServersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureDeadServersIsMutable(); - deadServers_.add(value); - onChanged(); - } else { - deadServersBuilder_.addMessage(value); - } - return this; - } - public Builder addDeadServers( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - 
if (deadServersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureDeadServersIsMutable(); - deadServers_.add(index, value); - onChanged(); - } else { - deadServersBuilder_.addMessage(index, value); - } - return this; - } - public Builder addDeadServers( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (deadServersBuilder_ == null) { - ensureDeadServersIsMutable(); - deadServers_.add(builderForValue.build()); - onChanged(); - } else { - deadServersBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addDeadServers( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (deadServersBuilder_ == null) { - ensureDeadServersIsMutable(); - deadServers_.add(index, builderForValue.build()); - onChanged(); - } else { - deadServersBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllDeadServers( - java.lang.Iterable values) { - if (deadServersBuilder_ == null) { - ensureDeadServersIsMutable(); - super.addAll(values, deadServers_); - onChanged(); - } else { - deadServersBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearDeadServers() { - if (deadServersBuilder_ == null) { - deadServers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - deadServersBuilder_.clear(); - } - return this; - } - public Builder removeDeadServers(int index) { - if (deadServersBuilder_ == null) { - ensureDeadServersIsMutable(); - deadServers_.remove(index); - onChanged(); - } else { - deadServersBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDeadServersBuilder( - int index) { - return getDeadServersFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( - int index) { - if (deadServersBuilder_ == null) { - return deadServers_.get(index); } else { - return deadServersBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getDeadServersOrBuilderList() { - if (deadServersBuilder_ != null) { - return deadServersBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(deadServers_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder() { - return getDeadServersFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder( - int index) { - return getDeadServersFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); - } - public java.util.List - getDeadServersBuilderList() { - return getDeadServersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getDeadServersFieldBuilder() { - if (deadServersBuilder_ == null) { - deadServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - deadServers_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - deadServers_ = null; - } - return deadServersBuilder_; - } - - // repeated .RegionInTransition regionsInTransition = 4; - private java.util.List regionsInTransition_ = - java.util.Collections.emptyList(); - private void ensureRegionsInTransitionIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - regionsInTransition_ = new java.util.ArrayList(regionsInTransition_); - bitField0_ |= 0x00000008; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> regionsInTransitionBuilder_; - - public java.util.List getRegionsInTransitionList() { - if (regionsInTransitionBuilder_ == null) { - return java.util.Collections.unmodifiableList(regionsInTransition_); - } else { - return regionsInTransitionBuilder_.getMessageList(); - } - } - public int getRegionsInTransitionCount() { - if (regionsInTransitionBuilder_ == null) { - return regionsInTransition_.size(); - } else { - return regionsInTransitionBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) { - if (regionsInTransitionBuilder_ == null) { - return regionsInTransition_.get(index); - } else { - return regionsInTransitionBuilder_.getMessage(index); - } - } - public Builder setRegionsInTransition( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { - if (regionsInTransitionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.set(index, value); - onChanged(); - } else { - regionsInTransitionBuilder_.setMessage(index, value); - } - return this; - } - public Builder setRegionsInTransition( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { - if (regionsInTransitionBuilder_ == null) { - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.set(index, builderForValue.build()); - onChanged(); - } else { - regionsInTransitionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addRegionsInTransition(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { - if (regionsInTransitionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.add(value); - onChanged(); - } else { - regionsInTransitionBuilder_.addMessage(value); - } - return this; - } - public Builder addRegionsInTransition( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) { - if (regionsInTransitionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.add(index, value); - onChanged(); - } else { - regionsInTransitionBuilder_.addMessage(index, value); - } - return this; - } - public Builder addRegionsInTransition( - 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { - if (regionsInTransitionBuilder_ == null) { - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.add(builderForValue.build()); - onChanged(); - } else { - regionsInTransitionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addRegionsInTransition( - int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) { - if (regionsInTransitionBuilder_ == null) { - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.add(index, builderForValue.build()); - onChanged(); - } else { - regionsInTransitionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllRegionsInTransition( - java.lang.Iterable values) { - if (regionsInTransitionBuilder_ == null) { - ensureRegionsInTransitionIsMutable(); - super.addAll(values, regionsInTransition_); - onChanged(); - } else { - regionsInTransitionBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearRegionsInTransition() { - if (regionsInTransitionBuilder_ == null) { - regionsInTransition_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - regionsInTransitionBuilder_.clear(); - } - return this; - } - public Builder removeRegionsInTransition(int index) { - if (regionsInTransitionBuilder_ == null) { - ensureRegionsInTransitionIsMutable(); - regionsInTransition_.remove(index); - onChanged(); - } else { - regionsInTransitionBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder getRegionsInTransitionBuilder( - int index) { - return getRegionsInTransitionFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( - int index) { - if (regionsInTransitionBuilder_ == null) { - return regionsInTransition_.get(index); } else { - return regionsInTransitionBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getRegionsInTransitionOrBuilderList() { - if (regionsInTransitionBuilder_ != null) { - return regionsInTransitionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(regionsInTransition_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder() { - return getRegionsInTransitionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder( - int index) { - return getRegionsInTransitionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()); - } - public java.util.List - getRegionsInTransitionBuilderList() { - return getRegionsInTransitionFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> - 
getRegionsInTransitionFieldBuilder() { - if (regionsInTransitionBuilder_ == null) { - regionsInTransitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder>( - regionsInTransition_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - regionsInTransition_ = null; - } - return regionsInTransitionBuilder_; - } - - // optional .ClusterId clusterId = 5; - private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> clusterIdBuilder_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { - if (clusterIdBuilder_ == null) { - return clusterId_; - } else { - return clusterIdBuilder_.getMessage(); - } - } - public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) { - if (clusterIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - clusterId_ = value; - onChanged(); - } else { - clusterIdBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setClusterId( - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder builderForValue) { - if (clusterIdBuilder_ == null) { - clusterId_ = builderForValue.build(); - onChanged(); - } else { - clusterIdBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) { - if (clusterIdBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - clusterId_ != org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) { - clusterId_ = - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(clusterId_).mergeFrom(value).buildPartial(); - } else { - clusterId_ = value; - } - onChanged(); - } else { - clusterIdBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearClusterId() { - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - onChanged(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder getClusterIdBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getClusterIdFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { - if (clusterIdBuilder_ != null) { - return clusterIdBuilder_.getMessageOrBuilder(); - } else { - return clusterId_; - } - } - private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> - getClusterIdFieldBuilder() { - if (clusterIdBuilder_ == null) { - clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder>( - clusterId_, - getParentForChildren(), - isClean()); - clusterId_ = null; - } - return clusterIdBuilder_; - } - - // repeated .Coprocessor masterCoprocessors = 6; - private java.util.List masterCoprocessors_ = - java.util.Collections.emptyList(); - private void ensureMasterCoprocessorsIsMutable() { - if (!((bitField0_ & 0x00000020) == 0x00000020)) { - masterCoprocessors_ = new java.util.ArrayList(masterCoprocessors_); - bitField0_ |= 0x00000020; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> masterCoprocessorsBuilder_; - - public java.util.List getMasterCoprocessorsList() { - if (masterCoprocessorsBuilder_ == null) { - return java.util.Collections.unmodifiableList(masterCoprocessors_); - } else { - return masterCoprocessorsBuilder_.getMessageList(); - } - } - public int getMasterCoprocessorsCount() { - if (masterCoprocessorsBuilder_ == null) { - return masterCoprocessors_.size(); - } else { - return masterCoprocessorsBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) { - if (masterCoprocessorsBuilder_ == null) { - return masterCoprocessors_.get(index); - } else { - return masterCoprocessorsBuilder_.getMessage(index); - } - } - public Builder setMasterCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { - if (masterCoprocessorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.set(index, value); - onChanged(); - } else { - masterCoprocessorsBuilder_.setMessage(index, value); - } - return this; - } - public Builder setMasterCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { - if (masterCoprocessorsBuilder_ == null) { - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.set(index, builderForValue.build()); - onChanged(); - } else { - masterCoprocessorsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addMasterCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { - if (masterCoprocessorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.add(value); - onChanged(); - } else { - masterCoprocessorsBuilder_.addMessage(value); - } - return this; - } - public Builder addMasterCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { - if (masterCoprocessorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - 
ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.add(index, value); - onChanged(); - } else { - masterCoprocessorsBuilder_.addMessage(index, value); - } - return this; - } - public Builder addMasterCoprocessors( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { - if (masterCoprocessorsBuilder_ == null) { - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.add(builderForValue.build()); - onChanged(); - } else { - masterCoprocessorsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addMasterCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { - if (masterCoprocessorsBuilder_ == null) { - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.add(index, builderForValue.build()); - onChanged(); - } else { - masterCoprocessorsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllMasterCoprocessors( - java.lang.Iterable values) { - if (masterCoprocessorsBuilder_ == null) { - ensureMasterCoprocessorsIsMutable(); - super.addAll(values, masterCoprocessors_); - onChanged(); - } else { - masterCoprocessorsBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearMasterCoprocessors() { - if (masterCoprocessorsBuilder_ == null) { - masterCoprocessors_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - onChanged(); - } else { - masterCoprocessorsBuilder_.clear(); - } - return this; - } - public Builder removeMasterCoprocessors(int index) { - if (masterCoprocessorsBuilder_ == null) { - ensureMasterCoprocessorsIsMutable(); - masterCoprocessors_.remove(index); - onChanged(); - } else { - masterCoprocessorsBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getMasterCoprocessorsBuilder( - int index) { - return getMasterCoprocessorsFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( - int index) { - if (masterCoprocessorsBuilder_ == null) { - return masterCoprocessors_.get(index); } else { - return masterCoprocessorsBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getMasterCoprocessorsOrBuilderList() { - if (masterCoprocessorsBuilder_ != null) { - return masterCoprocessorsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(masterCoprocessors_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder() { - return getMasterCoprocessorsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder( - int index) { - return getMasterCoprocessorsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); - } - public java.util.List - getMasterCoprocessorsBuilderList() { - return getMasterCoprocessorsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> - 
getMasterCoprocessorsFieldBuilder() { - if (masterCoprocessorsBuilder_ == null) { - masterCoprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( - masterCoprocessors_, - ((bitField0_ & 0x00000020) == 0x00000020), - getParentForChildren(), - isClean()); - masterCoprocessors_ = null; - } - return masterCoprocessorsBuilder_; - } - - // optional .ServerName master = 7; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; - public boolean hasMaster() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { - if (masterBuilder_ == null) { - return master_; - } else { - return masterBuilder_.getMessage(); - } - } - public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (masterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - master_ = value; - onChanged(); - } else { - masterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000040; - return this; - } - public Builder setMaster( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (masterBuilder_ == null) { - master_ = builderForValue.build(); - onChanged(); - } else { - masterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000040; - return this; - } - public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (masterBuilder_ == null) { - if (((bitField0_ & 0x00000040) == 0x00000040) && - master_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - master_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); - } else { - master_ = value; - } - onChanged(); - } else { - masterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000040; - return this; - } - public Builder clearMaster() { - if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - masterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000040); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { - bitField0_ |= 0x00000040; - onChanged(); - return getMasterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { - if (masterBuilder_ != null) { - return masterBuilder_.getMessageOrBuilder(); - } else { - return master_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - 
getMasterFieldBuilder() { - if (masterBuilder_ == null) { - masterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - master_, - getParentForChildren(), - isClean()); - master_ = null; - } - return masterBuilder_; - } - - // repeated .ServerName backupMasters = 8; - private java.util.List backupMasters_ = - java.util.Collections.emptyList(); - private void ensureBackupMastersIsMutable() { - if (!((bitField0_ & 0x00000080) == 0x00000080)) { - backupMasters_ = new java.util.ArrayList(backupMasters_); - bitField0_ |= 0x00000080; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> backupMastersBuilder_; - - public java.util.List getBackupMastersList() { - if (backupMastersBuilder_ == null) { - return java.util.Collections.unmodifiableList(backupMasters_); - } else { - return backupMastersBuilder_.getMessageList(); - } - } - public int getBackupMastersCount() { - if (backupMastersBuilder_ == null) { - return backupMasters_.size(); - } else { - return backupMastersBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) { - if (backupMastersBuilder_ == null) { - return backupMasters_.get(index); - } else { - return backupMastersBuilder_.getMessage(index); - } - } - public Builder setBackupMasters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (backupMastersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureBackupMastersIsMutable(); - backupMasters_.set(index, value); - onChanged(); - } else { - backupMastersBuilder_.setMessage(index, value); - } - return this; - } - public Builder setBackupMasters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (backupMastersBuilder_ == null) { - ensureBackupMastersIsMutable(); - backupMasters_.set(index, builderForValue.build()); - onChanged(); - } else { - backupMastersBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addBackupMasters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (backupMastersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureBackupMastersIsMutable(); - backupMasters_.add(value); - onChanged(); - } else { - backupMastersBuilder_.addMessage(value); - } - return this; - } - public Builder addBackupMasters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (backupMastersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureBackupMastersIsMutable(); - backupMasters_.add(index, value); - onChanged(); - } else { - backupMastersBuilder_.addMessage(index, value); - } - return this; - } - public Builder addBackupMasters( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (backupMastersBuilder_ == null) { - ensureBackupMastersIsMutable(); - backupMasters_.add(builderForValue.build()); - onChanged(); - } else { - 
backupMastersBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addBackupMasters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (backupMastersBuilder_ == null) { - ensureBackupMastersIsMutable(); - backupMasters_.add(index, builderForValue.build()); - onChanged(); - } else { - backupMastersBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllBackupMasters( - java.lang.Iterable values) { - if (backupMastersBuilder_ == null) { - ensureBackupMastersIsMutable(); - super.addAll(values, backupMasters_); - onChanged(); - } else { - backupMastersBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearBackupMasters() { - if (backupMastersBuilder_ == null) { - backupMasters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000080); - onChanged(); - } else { - backupMastersBuilder_.clear(); - } - return this; - } - public Builder removeBackupMasters(int index) { - if (backupMastersBuilder_ == null) { - ensureBackupMastersIsMutable(); - backupMasters_.remove(index); - onChanged(); - } else { - backupMastersBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getBackupMastersBuilder( - int index) { - return getBackupMastersFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( - int index) { - if (backupMastersBuilder_ == null) { - return backupMasters_.get(index); } else { - return backupMastersBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getBackupMastersOrBuilderList() { - if (backupMastersBuilder_ != null) { - return backupMastersBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(backupMasters_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder() { - return getBackupMastersFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder( - int index) { - return getBackupMastersFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); - } - public java.util.List - getBackupMastersBuilderList() { - return getBackupMastersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getBackupMastersFieldBuilder() { - if (backupMastersBuilder_ == null) { - backupMastersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - backupMasters_, - ((bitField0_ & 0x00000080) == 0x00000080), - getParentForChildren(), - isClean()); - backupMasters_ = null; - } - return backupMastersBuilder_; - } - - // optional bool balancerOn = 9; - private boolean balancerOn_ ; - public boolean hasBalancerOn() { - return ((bitField0_ & 0x00000100) 
== 0x00000100); - } - public boolean getBalancerOn() { - return balancerOn_; - } - public Builder setBalancerOn(boolean value) { - bitField0_ |= 0x00000100; - balancerOn_ = value; - onChanged(); - return this; - } - public Builder clearBalancerOn() { - bitField0_ = (bitField0_ & ~0x00000100); - balancerOn_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ClusterStatus) - } - - static { - defaultInstance = new ClusterStatus(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ClusterStatus) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionState_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionInTransition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionInTransition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_LiveServerInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_LiveServerInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ClusterStatus_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ClusterStatus_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\023ClusterStatus.proto\032\013hbase.proto\032\017Clus" + - "terId.proto\032\010FS.proto\"\346\001\n\013RegionState\022\037\n" + - "\nregionInfo\030\001 \002(\0132\013.RegionInfo\022!\n\005state\030" + - "\002 \002(\0162\022.RegionState.State\022\r\n\005stamp\030\003 \001(\004" + - "\"\203\001\n\005State\022\013\n\007OFFLINE\020\000\022\020\n\014PENDING_OPEN\020" + - "\001\022\013\n\007OPENING\020\002\022\010\n\004OPEN\020\003\022\021\n\rPENDING_CLOS" + - "E\020\004\022\013\n\007CLOSING\020\005\022\n\n\006CLOSED\020\006\022\r\n\tSPLITTIN" + - "G\020\007\022\t\n\005SPLIT\020\010\"W\n\022RegionInTransition\022\036\n\004" + - "spec\030\001 \002(\0132\020.RegionSpecifier\022!\n\013regionSt" + - "ate\030\002 \002(\0132\014.RegionState\"N\n\016LiveServerInf", - "o\022\033\n\006server\030\001 \002(\0132\013.ServerName\022\037\n\nserver" + - "Load\030\002 \002(\0132\013.ServerLoad\"\327\002\n\rClusterStatu" + - "s\022.\n\014hbaseVersion\030\001 \001(\0132\030.HBaseVersionFi" + - "leContent\022$\n\013liveServers\030\002 \003(\0132\017.LiveSer" + - "verInfo\022 \n\013deadServers\030\003 \003(\0132\013.ServerNam" + - "e\0220\n\023regionsInTransition\030\004 \003(\0132\023.RegionI" + - "nTransition\022\035\n\tclusterId\030\005 \001(\0132\n.Cluster" + - "Id\022(\n\022masterCoprocessors\030\006 \003(\0132\014.Coproce" + - "ssor\022\033\n\006master\030\007 \001(\0132\013.ServerName\022\"\n\rbac" + - "kupMasters\030\010 \003(\0132\013.ServerName\022\022\n\nbalance", - "rOn\030\t \001(\010BF\n*org.apache.hadoop.hbase.pro" + - "tobuf.generatedB\023ClusterStatusProtosH\001\240\001" + - "\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public 
com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_RegionState_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_RegionState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionState_descriptor, - new java.lang.String[] { "RegionInfo", "State", "Stamp", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); - internal_static_RegionInTransition_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_RegionInTransition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionInTransition_descriptor, - new java.lang.String[] { "Spec", "RegionState", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); - internal_static_LiveServerInfo_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_LiveServerInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_LiveServerInfo_descriptor, - new java.lang.String[] { "Server", "ServerLoad", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class); - internal_static_ClusterStatus_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_ClusterStatus_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ClusterStatus_descriptor, - new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", "BackupMasters", "BalancerOn", }, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.class, - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.getDescriptor(), - org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java deleted file mode 100644 index 983b57e..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java +++ /dev/null @@ -1,3881 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: Comparator.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class ComparatorProtos { - private ComparatorProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - - // optional bytes serializedComparator = 2; - boolean hasSerializedComparator(); - com.google.protobuf.ByteString getSerializedComparator(); - } - public static final class Comparator extends - com.google.protobuf.GeneratedMessage - implements ComparatorOrBuilder { - // Use Comparator.newBuilder() to construct. - private Comparator(Builder builder) { - super(builder); - } - private Comparator(boolean noInit) {} - - private static final Comparator defaultInstance; - public static Comparator getDefaultInstance() { - return defaultInstance; - } - - public Comparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes serializedComparator = 2; - public static final int SERIALIZEDCOMPARATOR_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString serializedComparator_; - public boolean hasSerializedComparator() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSerializedComparator() { - return serializedComparator_; - } - - private void initFields() { - name_ = ""; - serializedComparator_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, serializedComparator_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - 
public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, serializedComparator_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasSerializedComparator() == other.hasSerializedComparator()); - if (hasSerializedComparator()) { - result = result && getSerializedComparator() - .equals(other.getSerializedComparator()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasSerializedComparator()) { - hash = (37 * hash) + SERIALIZEDCOMPARATOR_FIELD_NUMBER; - hash = (53 * hash) + getSerializedComparator().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( - java.io.InputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_Comparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - serializedComparator_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDescriptor(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.serializedComparator_ = serializedComparator_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasSerializedComparator()) { - setSerializedComparator(other.getSerializedComparator()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - serializedComparator_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref 
instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // optional bytes serializedComparator = 2; - private com.google.protobuf.ByteString serializedComparator_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasSerializedComparator() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSerializedComparator() { - return serializedComparator_; - } - public Builder setSerializedComparator(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - serializedComparator_ = value; - onChanged(); - return this; - } - public Builder clearSerializedComparator() { - bitField0_ = (bitField0_ & ~0x00000002); - serializedComparator_ = getDefaultInstance().getSerializedComparator(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Comparator) - } - - static { - defaultInstance = new Comparator(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Comparator) - } - - public interface ByteArrayComparableOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes value = 1; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class ByteArrayComparable extends - com.google.protobuf.GeneratedMessage - implements ByteArrayComparableOrBuilder { - // Use ByteArrayComparable.newBuilder() to construct. 
- private ByteArrayComparable(Builder builder) { - super(builder); - } - private ByteArrayComparable(boolean noInit) {} - - private static final ByteArrayComparable defaultInstance; - public static ByteArrayComparable getDefaultInstance() { - return defaultInstance; - } - - public ByteArrayComparable getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) obj; - - boolean result = true; - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder { - public static final 
com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_ByteArrayComparable_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) return this; - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - 
public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // optional bytes value = 1; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000001); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ByteArrayComparable) - } - - static { - defaultInstance = new ByteArrayComparable(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ByteArrayComparable) - } - - public interface BinaryComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ByteArrayComparable comparable = 1; - boolean hasComparable(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); - } - public static final class BinaryComparator extends - com.google.protobuf.GeneratedMessage - implements BinaryComparatorOrBuilder { - // Use BinaryComparator.newBuilder() to construct. 
- private BinaryComparator(Builder builder) { - super(builder); - } - private BinaryComparator(boolean noInit) {} - - private static final BinaryComparator defaultInstance; - public static BinaryComparator getDefaultInstance() { - return defaultInstance; - } - - public BinaryComparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable; - } - - private int bitField0_; - // required .ByteArrayComparable comparable = 1; - public static final int COMPARABLE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; - public boolean hasComparable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; - } - - private void initFields() { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasComparable()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) obj; - - boolean result = true; - result = result && (hasComparable() == other.hasComparable()); - if (hasComparable()) { - result = result && getComparable() - .equals(other.getComparable()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + 
getDescriptorForType().hashCode(); - if (hasComparable()) { - hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; - hash = (53 * hash) + getComparable().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return 
newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryComparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getComparableFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } else { - comparableBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (comparableBuilder_ == null) { - result.comparable_ = comparable_; - } else { - result.comparable_ = comparableBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.getDefaultInstance()) return this; - if (other.hasComparable()) { - mergeComparable(other.getComparable()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasComparable()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); - if (hasComparable()) { - subBuilder.mergeFrom(getComparable()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparable(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; - public boolean hasComparable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - if (comparableBuilder_ == null) { - return comparable_; - } else { - return comparableBuilder_.getMessage(); - } - } - public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { - if (comparableBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - comparable_ = value; - onChanged(); - } else { - comparableBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setComparable( - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { - if (comparableBuilder_ == null) { - comparable_ = builderForValue.build(); - onChanged(); - } else { - comparableBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder 
mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { - if (comparableBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { - comparable_ = - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); - } else { - comparable_ = value; - } - onChanged(); - } else { - comparableBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearComparable() { - if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - onChanged(); - } else { - comparableBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getComparableFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - if (comparableBuilder_ != null) { - return comparableBuilder_.getMessageOrBuilder(); - } else { - return comparable_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> - getComparableFieldBuilder() { - if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, - getParentForChildren(), - isClean()); - comparable_ = null; - } - return comparableBuilder_; - } - - // @@protoc_insertion_point(builder_scope:BinaryComparator) - } - - static { - defaultInstance = new BinaryComparator(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BinaryComparator) - } - - public interface BinaryPrefixComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ByteArrayComparable comparable = 1; - boolean hasComparable(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); - } - public static final class BinaryPrefixComparator extends - com.google.protobuf.GeneratedMessage - implements BinaryPrefixComparatorOrBuilder { - // Use BinaryPrefixComparator.newBuilder() to construct. 
- private BinaryPrefixComparator(Builder builder) { - super(builder); - } - private BinaryPrefixComparator(boolean noInit) {} - - private static final BinaryPrefixComparator defaultInstance; - public static BinaryPrefixComparator getDefaultInstance() { - return defaultInstance; - } - - public BinaryPrefixComparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable; - } - - private int bitField0_; - // required .ByteArrayComparable comparable = 1; - public static final int COMPARABLE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; - public boolean hasComparable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; - } - - private void initFields() { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasComparable()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) obj; - - boolean result = true; - result = result && (hasComparable() == other.hasComparable()); - if (hasComparable()) { - result = result && getComparable() - .equals(other.getComparable()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash 
= 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasComparable()) { - hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; - hash = (53 * hash) + getComparable().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator prototype) { 
- return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BinaryPrefixComparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getComparableFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } else { - comparableBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (comparableBuilder_ == null) { - result.comparable_ = comparable_; - } else { - 
result.comparable_ = comparableBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.getDefaultInstance()) return this; - if (other.hasComparable()) { - mergeComparable(other.getComparable()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasComparable()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); - if (hasComparable()) { - subBuilder.mergeFrom(getComparable()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparable(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; - public boolean hasComparable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - if (comparableBuilder_ == null) { - return comparable_; - } else { - return comparableBuilder_.getMessage(); - } - } - public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { - if (comparableBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - comparable_ = value; - onChanged(); - } else { - comparableBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setComparable( - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { - if (comparableBuilder_ == null) { - comparable_ = builderForValue.build(); - 
onChanged(); - } else { - comparableBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { - if (comparableBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { - comparable_ = - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); - } else { - comparable_ = value; - } - onChanged(); - } else { - comparableBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearComparable() { - if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - onChanged(); - } else { - comparableBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getComparableFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - if (comparableBuilder_ != null) { - return comparableBuilder_.getMessageOrBuilder(); - } else { - return comparable_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> - getComparableFieldBuilder() { - if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, - getParentForChildren(), - isClean()); - comparable_ = null; - } - return comparableBuilder_; - } - - // @@protoc_insertion_point(builder_scope:BinaryPrefixComparator) - } - - static { - defaultInstance = new BinaryPrefixComparator(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BinaryPrefixComparator) - } - - public interface BitComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ByteArrayComparable comparable = 1; - boolean hasComparable(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); - - // required .BitComparator.BitwiseOp bitwiseOp = 2; - boolean hasBitwiseOp(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp(); - } - public static final class BitComparator extends - com.google.protobuf.GeneratedMessage - implements BitComparatorOrBuilder { - // Use BitComparator.newBuilder() to construct. 
- private BitComparator(Builder builder) { - super(builder); - } - private BitComparator(boolean noInit) {} - - private static final BitComparator defaultInstance; - public static BitComparator getDefaultInstance() { - return defaultInstance; - } - - public BitComparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable; - } - - public enum BitwiseOp - implements com.google.protobuf.ProtocolMessageEnum { - AND(0, 1), - OR(1, 2), - XOR(2, 3), - ; - - public static final int AND_VALUE = 1; - public static final int OR_VALUE = 2; - public static final int XOR_VALUE = 3; - - - public final int getNumber() { return value; } - - public static BitwiseOp valueOf(int value) { - switch (value) { - case 1: return AND; - case 2: return OR; - case 3: return XOR; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public BitwiseOp findValueByNumber(int number) { - return BitwiseOp.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDescriptor().getEnumTypes().get(0); - } - - private static final BitwiseOp[] VALUES = { - AND, OR, XOR, - }; - - public static BitwiseOp valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private BitwiseOp(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:BitComparator.BitwiseOp) - } - - private int bitField0_; - // required .ByteArrayComparable comparable = 1; - public static final int COMPARABLE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; - public boolean hasComparable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; - } - - // required .BitComparator.BitwiseOp bitwiseOp = 2; - public static final int BITWISEOP_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_; - public boolean hasBitwiseOp() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { - return bitwiseOp_; - } - - private void initFields() { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasComparable()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasBitwiseOp()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, bitwiseOp_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, bitwiseOp_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) obj; - - boolean result = true; - result = result && (hasComparable() == other.hasComparable()); - if (hasComparable()) { - result = result && getComparable() - .equals(other.getComparable()); - } - result = result && (hasBitwiseOp() == other.hasBitwiseOp()); - if (hasBitwiseOp()) { - result = result && - (getBitwiseOp() == other.getBitwiseOp()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasComparable()) { - hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; - hash = (53 * hash) + getComparable().hashCode(); - } - if (hasBitwiseOp()) { - hash = (37 * hash) + BITWISEOP_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getBitwiseOp()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( - 
com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BitComparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getComparableFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } else { - comparableBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (comparableBuilder_ == null) { - result.comparable_ = comparable_; - } else { - result.comparable_ = comparableBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.bitwiseOp_ = bitwiseOp_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.getDefaultInstance()) return this; - if (other.hasComparable()) { - mergeComparable(other.getComparable()); - } - if (other.hasBitwiseOp()) { - setBitwiseOp(other.getBitwiseOp()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasComparable()) { - - return false; - } - if (!hasBitwiseOp()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(); - if (hasComparable()) { - subBuilder.mergeFrom(getComparable()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparable(subBuilder.buildPartial()); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - bitwiseOp_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required .ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; - public boolean hasComparable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - if (comparableBuilder_ == null) { - return comparable_; - } else { - return comparableBuilder_.getMessage(); - } - } - public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { - if (comparableBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - comparable_ = value; - onChanged(); - } else { - comparableBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setComparable( - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { - if (comparableBuilder_ == null) { - comparable_ = builderForValue.build(); - 
onChanged(); - } else { - comparableBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { - if (comparableBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { - comparable_ = - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); - } else { - comparable_ = value; - } - onChanged(); - } else { - comparableBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearComparable() { - if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - onChanged(); - } else { - comparableBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getComparableFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - if (comparableBuilder_ != null) { - return comparableBuilder_.getMessageOrBuilder(); - } else { - return comparable_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> - getComparableFieldBuilder() { - if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, - getParentForChildren(), - isClean()); - comparable_ = null; - } - return comparableBuilder_; - } - - // required .BitComparator.BitwiseOp bitwiseOp = 2; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; - public boolean hasBitwiseOp() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { - return bitwiseOp_; - } - public Builder setBitwiseOp(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - bitwiseOp_ = value; - onChanged(); - return this; - } - public Builder clearBitwiseOp() { - bitField0_ = (bitField0_ & ~0x00000002); - bitwiseOp_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:BitComparator) - } - - static { - defaultInstance = new BitComparator(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:BitComparator) - } - - public interface NullComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class NullComparator extends - com.google.protobuf.GeneratedMessage - implements NullComparatorOrBuilder { - // Use NullComparator.newBuilder() to construct. - private NullComparator(Builder builder) { - super(builder); - } - private NullComparator(boolean noInit) {} - - private static final NullComparator defaultInstance; - public static NullComparator getDefaultInstance() { - return defaultInstance; - } - - public NullComparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_NullComparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - 
super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:NullComparator) - } - - static { - defaultInstance = new NullComparator(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:NullComparator) - } - - public interface RegexStringComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string pattern = 1; - 
boolean hasPattern(); - String getPattern(); - - // required int32 patternFlags = 2; - boolean hasPatternFlags(); - int getPatternFlags(); - - // required string charset = 3; - boolean hasCharset(); - String getCharset(); - } - public static final class RegexStringComparator extends - com.google.protobuf.GeneratedMessage - implements RegexStringComparatorOrBuilder { - // Use RegexStringComparator.newBuilder() to construct. - private RegexStringComparator(Builder builder) { - super(builder); - } - private RegexStringComparator(boolean noInit) {} - - private static final RegexStringComparator defaultInstance; - public static RegexStringComparator getDefaultInstance() { - return defaultInstance; - } - - public RegexStringComparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable; - } - - private int bitField0_; - // required string pattern = 1; - public static final int PATTERN_FIELD_NUMBER = 1; - private java.lang.Object pattern_; - public boolean hasPattern() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getPattern() { - java.lang.Object ref = pattern_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - pattern_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getPatternBytes() { - java.lang.Object ref = pattern_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - pattern_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required int32 patternFlags = 2; - public static final int PATTERNFLAGS_FIELD_NUMBER = 2; - private int patternFlags_; - public boolean hasPatternFlags() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getPatternFlags() { - return patternFlags_; - } - - // required string charset = 3; - public static final int CHARSET_FIELD_NUMBER = 3; - private java.lang.Object charset_; - public boolean hasCharset() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getCharset() { - java.lang.Object ref = charset_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - charset_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getCharsetBytes() { - java.lang.Object ref = charset_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - charset_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - pattern_ = ""; - patternFlags_ = 0; - charset_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized 
== 1; - - if (!hasPattern()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasPatternFlags()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCharset()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getPatternBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeInt32(2, patternFlags_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getCharsetBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getPatternBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(2, patternFlags_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getCharsetBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) obj; - - boolean result = true; - result = result && (hasPattern() == other.hasPattern()); - if (hasPattern()) { - result = result && getPattern() - .equals(other.getPattern()); - } - result = result && (hasPatternFlags() == other.hasPatternFlags()); - if (hasPatternFlags()) { - result = result && (getPatternFlags() - == other.getPatternFlags()); - } - result = result && (hasCharset() == other.hasCharset()); - if (hasCharset()) { - result = result && getCharset() - .equals(other.getCharset()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPattern()) { - hash = (37 * hash) + PATTERN_FIELD_NUMBER; - hash = (53 * hash) + getPattern().hashCode(); - } - if (hasPatternFlags()) { - hash = (37 * hash) + PATTERNFLAGS_FIELD_NUMBER; - hash = (53 * hash) + getPatternFlags(); - } - if (hasCharset()) { - hash = (37 * hash) + CHARSET_FIELD_NUMBER; - hash = (53 * hash) + getCharset().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - 
getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_RegexStringComparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - pattern_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - patternFlags_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - charset_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.pattern_ = pattern_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.patternFlags_ = patternFlags_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.charset_ = charset_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.getDefaultInstance()) return this; - if (other.hasPattern()) { - setPattern(other.getPattern()); - } - if (other.hasPatternFlags()) { - setPatternFlags(other.getPatternFlags()); - } - if (other.hasCharset()) { - setCharset(other.getCharset()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPattern()) { - - return false; - } - if (!hasPatternFlags()) { - - return false; - } - if (!hasCharset()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - pattern_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - patternFlags_ = input.readInt32(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - charset_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string pattern = 1; - private java.lang.Object pattern_ = ""; - public boolean hasPattern() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getPattern() { - java.lang.Object ref = pattern_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - pattern_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setPattern(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - pattern_ = value; - onChanged(); - return this; - } - public Builder clearPattern() { - bitField0_ = (bitField0_ & ~0x00000001); - pattern_ = getDefaultInstance().getPattern(); - onChanged(); - return this; - } - void setPattern(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - pattern_ = value; - onChanged(); - } - - // required int32 patternFlags = 2; - private int patternFlags_ ; - public boolean hasPatternFlags() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getPatternFlags() { - return patternFlags_; - } - public Builder setPatternFlags(int value) { - bitField0_ |= 0x00000002; - patternFlags_ = value; - onChanged(); - return this; - } - public Builder clearPatternFlags() { - bitField0_ = (bitField0_ & ~0x00000002); - patternFlags_ = 0; - onChanged(); - return this; - } - - // required string charset = 3; - private java.lang.Object charset_ = ""; - public boolean hasCharset() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getCharset() { - java.lang.Object ref = charset_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - charset_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setCharset(String value) { - if (value == null) { - throw new NullPointerException(); - } - 
bitField0_ |= 0x00000004; - charset_ = value; - onChanged(); - return this; - } - public Builder clearCharset() { - bitField0_ = (bitField0_ & ~0x00000004); - charset_ = getDefaultInstance().getCharset(); - onChanged(); - return this; - } - void setCharset(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; - charset_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:RegexStringComparator) - } - - static { - defaultInstance = new RegexStringComparator(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegexStringComparator) - } - - public interface SubstringComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string substr = 1; - boolean hasSubstr(); - String getSubstr(); - } - public static final class SubstringComparator extends - com.google.protobuf.GeneratedMessage - implements SubstringComparatorOrBuilder { - // Use SubstringComparator.newBuilder() to construct. - private SubstringComparator(Builder builder) { - super(builder); - } - private SubstringComparator(boolean noInit) {} - - private static final SubstringComparator defaultInstance; - public static SubstringComparator getDefaultInstance() { - return defaultInstance; - } - - public SubstringComparator getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable; - } - - private int bitField0_; - // required string substr = 1; - public static final int SUBSTR_FIELD_NUMBER = 1; - private java.lang.Object substr_; - public boolean hasSubstr() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getSubstr() { - java.lang.Object ref = substr_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - substr_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getSubstrBytes() { - java.lang.Object ref = substr_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - substr_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - substr_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasSubstr()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getSubstrBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += 
com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getSubstrBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) obj; - - boolean result = true; - result = result && (hasSubstr() == other.hasSubstr()); - if (hasSubstr()) { - result = result && getSubstr() - .equals(other.getSubstr()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSubstr()) { - hash = (37 * hash) + SUBSTR_FIELD_NUMBER; - hash = (53 * hash) + getSubstr().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom( - 
java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparatorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_SubstringComparator_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - substr_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator build() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator 
buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.substr_ = substr_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.getDefaultInstance()) return this; - if (other.hasSubstr()) { - setSubstr(other.getSubstr()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasSubstr()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - substr_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string substr = 1; - private java.lang.Object substr_ = ""; - public boolean hasSubstr() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getSubstr() { - java.lang.Object ref = substr_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - substr_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setSubstr(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - substr_ = value; - onChanged(); - return this; - } - public Builder clearSubstr() { - bitField0_ = (bitField0_ & ~0x00000001); - substr_ = getDefaultInstance().getSubstr(); - onChanged(); - return this; - } - void setSubstr(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - substr_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:SubstringComparator) - } - - static { - defaultInstance = new SubstringComparator(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:SubstringComparator) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Comparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Comparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ByteArrayComparable_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ByteArrayComparable_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BinaryComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BinaryComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BinaryPrefixComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BinaryPrefixComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BitComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BitComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_NullComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_NullComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegexStringComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegexStringComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SubstringComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SubstringComparator_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\020Comparator.proto\"8\n\nComparator\022\014\n\004name" + - "\030\001 \002(\t\022\034\n\024serializedComparator\030\002 \001(\014\"$\n\023" + - "ByteArrayComparable\022\r\n\005value\030\001 \001(\014\"<\n\020Bi" + - "naryComparator\022(\n\ncomparable\030\001 \002(\0132\024.Byt" + - "eArrayComparable\"B\n\026BinaryPrefixComparat" + - "or\022(\n\ncomparable\030\001 \002(\0132\024.ByteArrayCompar" + - "able\"\215\001\n\rBitComparator\022(\n\ncomparable\030\001 \002" + - "(\0132\024.ByteArrayComparable\022+\n\tbitwiseOp\030\002 " + - "\002(\0162\030.BitComparator.BitwiseOp\"%\n\tBitwise" + - "Op\022\007\n\003AND\020\001\022\006\n\002OR\020\002\022\007\n\003XOR\020\003\"\020\n\016NullComp", - "arator\"O\n\025RegexStringComparator\022\017\n\007patte" + - "rn\030\001 \002(\t\022\024\n\014patternFlags\030\002 \002(\005\022\017\n\007charse" + - "t\030\003 \002(\t\"%\n\023SubstringComparator\022\016\n\006substr" + - "\030\001 \002(\tBF\n*org.apache.hadoop.hbase.protob" + - "uf.generatedB\020ComparatorProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor 
root) { - descriptor = root; - internal_static_Comparator_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_Comparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Comparator_descriptor, - new java.lang.String[] { "Name", "SerializedComparator", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder.class); - internal_static_ByteArrayComparable_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_ByteArrayComparable_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ByteArrayComparable_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder.class); - internal_static_BinaryComparator_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_BinaryComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BinaryComparator_descriptor, - new java.lang.String[] { "Comparable", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryComparator.Builder.class); - internal_static_BinaryPrefixComparator_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_BinaryPrefixComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BinaryPrefixComparator_descriptor, - new java.lang.String[] { "Comparable", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.Builder.class); - internal_static_BitComparator_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_BitComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BitComparator_descriptor, - new java.lang.String[] { "Comparable", "BitwiseOp", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); - internal_static_NullComparator_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_NullComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_NullComparator_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.NullComparator.Builder.class); - internal_static_RegexStringComparator_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_RegexStringComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegexStringComparator_descriptor, - new java.lang.String[] { "Pattern", "PatternFlags", "Charset", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.RegexStringComparator.Builder.class); - internal_static_SubstringComparator_descriptor = - 
getDescriptor().getMessageTypes().get(7); - internal_static_SubstringComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SubstringComparator_descriptor, - new java.lang.String[] { "Substr", }, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.class, - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.SubstringComparator.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java deleted file mode 100644 index 79d13c1..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java +++ /dev/null @@ -1,1018 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: FS.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class FSProtos { - private FSProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface HBaseVersionFileContentOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string version = 1; - boolean hasVersion(); - String getVersion(); - } - public static final class HBaseVersionFileContent extends - com.google.protobuf.GeneratedMessage - implements HBaseVersionFileContentOrBuilder { - // Use HBaseVersionFileContent.newBuilder() to construct. - private HBaseVersionFileContent(Builder builder) { - super(builder); - } - private HBaseVersionFileContent(boolean noInit) {} - - private static final HBaseVersionFileContent defaultInstance; - public static HBaseVersionFileContent getDefaultInstance() { - return defaultInstance; - } - - public HBaseVersionFileContent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable; - } - - private int bitField0_; - // required string version = 1; - public static final int VERSION_FIELD_NUMBER = 1; - private java.lang.Object version_; - public boolean hasVersion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getVersion() { - java.lang.Object ref = version_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - version_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - version_ = ""; - } - private byte memoizedIsInitialized = -1; - public final 
boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasVersion()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getVersionBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getVersionBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) obj; - - boolean result = true; - result = result && (hasVersion() == other.hasVersion()); - if (hasVersion()) { - result = result && getVersion() - .equals(other.getVersion()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasVersion()) { - hash = (37 * hash) + VERSION_FIELD_NUMBER; - hash = (53 * hash) + getVersion().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - version_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent build() { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.version_ = version_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) return this; - if (other.hasVersion()) { - setVersion(other.getVersion()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasVersion()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - version_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string version = 1; - private java.lang.Object version_ = ""; - public boolean hasVersion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getVersion() { - java.lang.Object ref = version_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - version_ = s; - return s; - } else { - return 
(String) ref; - } - } - public Builder setVersion(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - version_ = value; - onChanged(); - return this; - } - public Builder clearVersion() { - bitField0_ = (bitField0_ & ~0x00000001); - version_ = getDefaultInstance().getVersion(); - onChanged(); - return this; - } - void setVersion(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - version_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:HBaseVersionFileContent) - } - - static { - defaultInstance = new HBaseVersionFileContent(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:HBaseVersionFileContent) - } - - public interface ReferenceOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes splitkey = 1; - boolean hasSplitkey(); - com.google.protobuf.ByteString getSplitkey(); - - // required .Reference.Range range = 2; - boolean hasRange(); - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange(); - } - public static final class Reference extends - com.google.protobuf.GeneratedMessage - implements ReferenceOrBuilder { - // Use Reference.newBuilder() to construct. - private Reference(Builder builder) { - super(builder); - } - private Reference(boolean noInit) {} - - private static final Reference defaultInstance; - public static Reference getDefaultInstance() { - return defaultInstance; - } - - public Reference getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable; - } - - public enum Range - implements com.google.protobuf.ProtocolMessageEnum { - TOP(0, 0), - BOTTOM(1, 1), - ; - - public static final int TOP_VALUE = 0; - public static final int BOTTOM_VALUE = 1; - - - public final int getNumber() { return value; } - - public static Range valueOf(int value) { - switch (value) { - case 0: return TOP; - case 1: return BOTTOM; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Range findValueByNumber(int number) { - return Range.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor().getEnumTypes().get(0); - } - - private static final Range[] VALUES = { - TOP, BOTTOM, - }; - - public static Range valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - 
private final int value; - - private Range(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Reference.Range) - } - - private int bitField0_; - // required bytes splitkey = 1; - public static final int SPLITKEY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString splitkey_; - public boolean hasSplitkey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getSplitkey() { - return splitkey_; - } - - // required .Reference.Range range = 2; - public static final int RANGE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_; - public boolean hasRange() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() { - return range_; - } - - private void initFields() { - splitkey_ = com.google.protobuf.ByteString.EMPTY; - range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasSplitkey()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRange()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, splitkey_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, range_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, splitkey_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, range_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference other = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) obj; - - boolean result = true; - result = result && (hasSplitkey() == other.hasSplitkey()); - if (hasSplitkey()) { - result = result && getSplitkey() - .equals(other.getSplitkey()); - } - result = result && (hasRange() == other.hasRange()); - if (hasRange()) { - result = result && - (getRange() == other.getRange()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSplitkey()) { - hash = (37 * hash) + SPLITKEY_FIELD_NUMBER; - hash = (53 * hash) + 
getSplitkey().hashCode(); - } - if (hasRange()) { - hash = (37 * hash) + RANGE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getRange()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - 
return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - splitkey_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference build() { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.splitkey_ = splitkey_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.range_ = range_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance()) return this; - if (other.hasSplitkey()) { - setSplitkey(other.getSplitkey()); - } - if (other.hasRange()) { - setRange(other.getRange()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasSplitkey()) { - - return false; - } - if (!hasRange()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - splitkey_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - range_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required bytes splitkey = 1; - private com.google.protobuf.ByteString splitkey_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasSplitkey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getSplitkey() { - return splitkey_; - } - public Builder setSplitkey(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - splitkey_ = value; - onChanged(); - return this; - } - public Builder clearSplitkey() { - bitField0_ = (bitField0_ & ~0x00000001); - splitkey_ = getDefaultInstance().getSplitkey(); - onChanged(); - return this; - } - - // required .Reference.Range range = 2; - private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; - public boolean hasRange() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() { - return range_; - } - public Builder setRange(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - range_ = value; - onChanged(); - return this; - } - public Builder clearRange() { - bitField0_ = (bitField0_ & ~0x00000002); - range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Reference) - } - - static { - defaultInstance = new Reference(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Reference) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_HBaseVersionFileContent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_HBaseVersionFileContent_fieldAccessorTable; 
- private static com.google.protobuf.Descriptors.Descriptor - internal_static_Reference_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Reference_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\010FS.proto\"*\n\027HBaseVersionFileContent\022\017\n" + - "\007version\030\001 \002(\t\"\\\n\tReference\022\020\n\010splitkey\030" + - "\001 \002(\014\022\037\n\005range\030\002 \002(\0162\020.Reference.Range\"\034" + - "\n\005Range\022\007\n\003TOP\020\000\022\n\n\006BOTTOM\020\001B;\n*org.apac" + - "he.hadoop.hbase.protobuf.generatedB\010FSPr" + - "otosH\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_HBaseVersionFileContent_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_HBaseVersionFileContent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_HBaseVersionFileContent_descriptor, - new java.lang.String[] { "Version", }, - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, - org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class); - internal_static_Reference_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_Reference_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Reference_descriptor, - new java.lang.String[] { "Splitkey", "Range", }, - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, - org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java deleted file mode 100644 index 8088751..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java +++ /dev/null @@ -1,12942 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: Filter.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class FilterProtos { - private FilterProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ColumnCountGetFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int32 limit = 1; - boolean hasLimit(); - int getLimit(); - } - public static final class ColumnCountGetFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnCountGetFilterOrBuilder { - // Use ColumnCountGetFilter.newBuilder() to construct. 
- private ColumnCountGetFilter(Builder builder) { - super(builder); - } - private ColumnCountGetFilter(boolean noInit) {} - - private static final ColumnCountGetFilter defaultInstance; - public static ColumnCountGetFilter getDefaultInstance() { - return defaultInstance; - } - - public ColumnCountGetFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable; - } - - private int bitField0_; - // required int32 limit = 1; - public static final int LIMIT_FIELD_NUMBER = 1; - private int limit_; - public boolean hasLimit() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getLimit() { - return limit_; - } - - private void initFields() { - limit_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLimit()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt32(1, limit_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(1, limit_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj; - - boolean result = true; - result = result && (hasLimit() == other.hasLimit()); - if (hasLimit()) { - result = result && (getLimit() - == other.getLimit()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLimit()) { - hash = (37 * hash) + LIMIT_FIELD_NUMBER; - hash = (53 * hash) + getLimit(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - limit_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.limit_ = limit_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this; - if (other.hasLimit()) { - setLimit(other.getLimit()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLimit()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - limit_ = input.readInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required int32 limit = 1; - private int limit_ ; - public boolean hasLimit() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getLimit() { - return limit_; - } - public Builder setLimit(int value) { - bitField0_ |= 0x00000001; - limit_ = value; - onChanged(); - return this; - } - public Builder clearLimit() { - bitField0_ = (bitField0_ & ~0x00000001); - limit_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnCountGetFilter) - } - - static { - defaultInstance = new ColumnCountGetFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnCountGetFilter) - } - - public interface ColumnPaginationFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int32 limit = 1; - boolean hasLimit(); - int getLimit(); - - // optional int32 offset = 2; - boolean hasOffset(); - int getOffset(); - } - public static final class ColumnPaginationFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnPaginationFilterOrBuilder { - // Use ColumnPaginationFilter.newBuilder() to construct. 
- private ColumnPaginationFilter(Builder builder) { - super(builder); - } - private ColumnPaginationFilter(boolean noInit) {} - - private static final ColumnPaginationFilter defaultInstance; - public static ColumnPaginationFilter getDefaultInstance() { - return defaultInstance; - } - - public ColumnPaginationFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable; - } - - private int bitField0_; - // required int32 limit = 1; - public static final int LIMIT_FIELD_NUMBER = 1; - private int limit_; - public boolean hasLimit() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getLimit() { - return limit_; - } - - // optional int32 offset = 2; - public static final int OFFSET_FIELD_NUMBER = 2; - private int offset_; - public boolean hasOffset() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getOffset() { - return offset_; - } - - private void initFields() { - limit_ = 0; - offset_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLimit()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt32(1, limit_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeInt32(2, offset_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(1, limit_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(2, offset_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj; - - boolean result = true; - result = result && (hasLimit() == other.hasLimit()); - if (hasLimit()) { - result = result && (getLimit() - == other.getLimit()); - } - result = result && (hasOffset() == other.hasOffset()); - if (hasOffset()) { - result = result && (getOffset() - == other.getOffset()); - } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLimit()) { - hash = (37 * hash) + LIMIT_FIELD_NUMBER; - hash = (53 * hash) + getLimit(); - } - if (hasOffset()) { - hash = (37 * hash) + OFFSET_FIELD_NUMBER; - hash = (53 * hash) + getOffset(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder 
newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - limit_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - offset_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.limit_ = limit_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - 
result.offset_ = offset_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this; - if (other.hasLimit()) { - setLimit(other.getLimit()); - } - if (other.hasOffset()) { - setOffset(other.getOffset()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLimit()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - limit_ = input.readInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - offset_ = input.readInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required int32 limit = 1; - private int limit_ ; - public boolean hasLimit() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getLimit() { - return limit_; - } - public Builder setLimit(int value) { - bitField0_ |= 0x00000001; - limit_ = value; - onChanged(); - return this; - } - public Builder clearLimit() { - bitField0_ = (bitField0_ & ~0x00000001); - limit_ = 0; - onChanged(); - return this; - } - - // optional int32 offset = 2; - private int offset_ ; - public boolean hasOffset() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getOffset() { - return offset_; - } - public Builder setOffset(int value) { - bitField0_ |= 0x00000002; - offset_ = value; - onChanged(); - return this; - } - public Builder clearOffset() { - bitField0_ = (bitField0_ & ~0x00000002); - offset_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnPaginationFilter) - } - - static { - defaultInstance = new ColumnPaginationFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnPaginationFilter) - } - - public interface ColumnPrefixFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes prefix = 1; - boolean hasPrefix(); - com.google.protobuf.ByteString getPrefix(); - } - public static final class ColumnPrefixFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnPrefixFilterOrBuilder { - // Use ColumnPrefixFilter.newBuilder() to construct. 
- private ColumnPrefixFilter(Builder builder) { - super(builder); - } - private ColumnPrefixFilter(boolean noInit) {} - - private static final ColumnPrefixFilter defaultInstance; - public static ColumnPrefixFilter getDefaultInstance() { - return defaultInstance; - } - - public ColumnPrefixFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable; - } - - private int bitField0_; - // required bytes prefix = 1; - public static final int PREFIX_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString prefix_; - public boolean hasPrefix() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getPrefix() { - return prefix_; - } - - private void initFields() { - prefix_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPrefix()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, prefix_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, prefix_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj; - - boolean result = true; - result = result && (hasPrefix() == other.hasPrefix()); - if (hasPrefix()) { - result = result && getPrefix() - .equals(other.getPrefix()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPrefix()) { - hash = (37 * hash) + PREFIX_FIELD_NUMBER; - hash = (53 * hash) + getPrefix().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder { - public static 
final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - prefix_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.prefix_ = prefix_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this; - if (other.hasPrefix()) { - setPrefix(other.getPrefix()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPrefix()) { - - return 
false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - prefix_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes prefix = 1; - private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasPrefix() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getPrefix() { - return prefix_; - } - public Builder setPrefix(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - prefix_ = value; - onChanged(); - return this; - } - public Builder clearPrefix() { - bitField0_ = (bitField0_ & ~0x00000001); - prefix_ = getDefaultInstance().getPrefix(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnPrefixFilter) - } - - static { - defaultInstance = new ColumnPrefixFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnPrefixFilter) - } - - public interface ColumnRangeFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes minColumn = 1; - boolean hasMinColumn(); - com.google.protobuf.ByteString getMinColumn(); - - // optional bool minColumnInclusive = 2; - boolean hasMinColumnInclusive(); - boolean getMinColumnInclusive(); - - // optional bytes maxColumn = 3; - boolean hasMaxColumn(); - com.google.protobuf.ByteString getMaxColumn(); - - // optional bool maxColumnInclusive = 4; - boolean hasMaxColumnInclusive(); - boolean getMaxColumnInclusive(); - } - public static final class ColumnRangeFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnRangeFilterOrBuilder { - // Use ColumnRangeFilter.newBuilder() to construct. 
- private ColumnRangeFilter(Builder builder) { - super(builder); - } - private ColumnRangeFilter(boolean noInit) {} - - private static final ColumnRangeFilter defaultInstance; - public static ColumnRangeFilter getDefaultInstance() { - return defaultInstance; - } - - public ColumnRangeFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes minColumn = 1; - public static final int MINCOLUMN_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString minColumn_; - public boolean hasMinColumn() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getMinColumn() { - return minColumn_; - } - - // optional bool minColumnInclusive = 2; - public static final int MINCOLUMNINCLUSIVE_FIELD_NUMBER = 2; - private boolean minColumnInclusive_; - public boolean hasMinColumnInclusive() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMinColumnInclusive() { - return minColumnInclusive_; - } - - // optional bytes maxColumn = 3; - public static final int MAXCOLUMN_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString maxColumn_; - public boolean hasMaxColumn() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getMaxColumn() { - return maxColumn_; - } - - // optional bool maxColumnInclusive = 4; - public static final int MAXCOLUMNINCLUSIVE_FIELD_NUMBER = 4; - private boolean maxColumnInclusive_; - public boolean hasMaxColumnInclusive() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getMaxColumnInclusive() { - return maxColumnInclusive_; - } - - private void initFields() { - minColumn_ = com.google.protobuf.ByteString.EMPTY; - minColumnInclusive_ = false; - maxColumn_ = com.google.protobuf.ByteString.EMPTY; - maxColumnInclusive_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, minColumn_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, minColumnInclusive_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, maxColumn_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBool(4, maxColumnInclusive_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, minColumn_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, minColumnInclusive_); - } - if (((bitField0_ & 0x00000004) == 
0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, maxColumn_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, maxColumnInclusive_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj; - - boolean result = true; - result = result && (hasMinColumn() == other.hasMinColumn()); - if (hasMinColumn()) { - result = result && getMinColumn() - .equals(other.getMinColumn()); - } - result = result && (hasMinColumnInclusive() == other.hasMinColumnInclusive()); - if (hasMinColumnInclusive()) { - result = result && (getMinColumnInclusive() - == other.getMinColumnInclusive()); - } - result = result && (hasMaxColumn() == other.hasMaxColumn()); - if (hasMaxColumn()) { - result = result && getMaxColumn() - .equals(other.getMaxColumn()); - } - result = result && (hasMaxColumnInclusive() == other.hasMaxColumnInclusive()); - if (hasMaxColumnInclusive()) { - result = result && (getMaxColumnInclusive() - == other.getMaxColumnInclusive()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMinColumn()) { - hash = (37 * hash) + MINCOLUMN_FIELD_NUMBER; - hash = (53 * hash) + getMinColumn().hashCode(); - } - if (hasMinColumnInclusive()) { - hash = (37 * hash) + MINCOLUMNINCLUSIVE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMinColumnInclusive()); - } - if (hasMaxColumn()) { - hash = (37 * hash) + MAXCOLUMN_FIELD_NUMBER; - hash = (53 * hash) + getMaxColumn().hashCode(); - } - if (hasMaxColumnInclusive()) { - hash = (37 * hash) + MAXCOLUMNINCLUSIVE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMaxColumnInclusive()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - 
private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - minColumn_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - minColumnInclusive_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - maxColumn_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - maxColumnInclusive_ = false; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.minColumn_ = minColumn_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.minColumnInclusive_ = minColumnInclusive_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.maxColumn_ = maxColumn_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.maxColumnInclusive_ = maxColumnInclusive_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this; - if (other.hasMinColumn()) { - setMinColumn(other.getMinColumn()); - } - if (other.hasMinColumnInclusive()) { - setMinColumnInclusive(other.getMinColumnInclusive()); - } - if (other.hasMaxColumn()) { - setMaxColumn(other.getMaxColumn()); - } - if (other.hasMaxColumnInclusive()) { - setMaxColumnInclusive(other.getMaxColumnInclusive()); - } - 
this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - minColumn_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - minColumnInclusive_ = input.readBool(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - maxColumn_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - maxColumnInclusive_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional bytes minColumn = 1; - private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasMinColumn() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getMinColumn() { - return minColumn_; - } - public Builder setMinColumn(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - minColumn_ = value; - onChanged(); - return this; - } - public Builder clearMinColumn() { - bitField0_ = (bitField0_ & ~0x00000001); - minColumn_ = getDefaultInstance().getMinColumn(); - onChanged(); - return this; - } - - // optional bool minColumnInclusive = 2; - private boolean minColumnInclusive_ ; - public boolean hasMinColumnInclusive() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMinColumnInclusive() { - return minColumnInclusive_; - } - public Builder setMinColumnInclusive(boolean value) { - bitField0_ |= 0x00000002; - minColumnInclusive_ = value; - onChanged(); - return this; - } - public Builder clearMinColumnInclusive() { - bitField0_ = (bitField0_ & ~0x00000002); - minColumnInclusive_ = false; - onChanged(); - return this; - } - - // optional bytes maxColumn = 3; - private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasMaxColumn() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getMaxColumn() { - return maxColumn_; - } - public Builder setMaxColumn(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - maxColumn_ = value; - onChanged(); - return this; - } - public Builder clearMaxColumn() { - bitField0_ = (bitField0_ & ~0x00000004); - maxColumn_ = getDefaultInstance().getMaxColumn(); - onChanged(); - return this; - } - - // optional bool maxColumnInclusive = 4; - private boolean maxColumnInclusive_ ; - public boolean hasMaxColumnInclusive() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getMaxColumnInclusive() { - return maxColumnInclusive_; - } - public Builder setMaxColumnInclusive(boolean value) { - bitField0_ |= 0x00000008; - maxColumnInclusive_ = value; - onChanged(); - return this; - } - public Builder 
clearMaxColumnInclusive() { - bitField0_ = (bitField0_ & ~0x00000008); - maxColumnInclusive_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnRangeFilter) - } - - static { - defaultInstance = new ColumnRangeFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnRangeFilter) - } - - public interface CompareFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .CompareType compareOp = 1; - boolean hasCompareOp(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); - - // optional .Comparator comparator = 2; - boolean hasComparator(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); - } - public static final class CompareFilter extends - com.google.protobuf.GeneratedMessage - implements CompareFilterOrBuilder { - // Use CompareFilter.newBuilder() to construct. - private CompareFilter(Builder builder) { - super(builder); - } - private CompareFilter(boolean noInit) {} - - private static final CompareFilter defaultInstance; - public static CompareFilter getDefaultInstance() { - return defaultInstance; - } - - public CompareFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .CompareType compareOp = 1; - public static final int COMPAREOP_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; - public boolean hasCompareOp() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; - } - - // optional .Comparator comparator = 2; - public static final int COMPARATOR_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { - return comparator_; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - return comparator_; - } - - private void initFields() { - compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareOp()) { - memoizedIsInitialized = 0; - return false; - } - if (hasComparator()) { - if (!getComparator().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException 
{ - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, compareOp_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, comparator_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, compareOp_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, comparator_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj; - - boolean result = true; - result = result && (hasCompareOp() == other.hasCompareOp()); - if (hasCompareOp()) { - result = result && - (getCompareOp() == other.getCompareOp()); - } - result = result && (hasComparator() == other.hasComparator()); - if (hasComparator()) { - result = result && getComparator() - .equals(other.getComparator()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCompareOp()) { - hash = (37 * hash) + COMPAREOP_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompareOp()); - } - if (hasComparator()) { - hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; - hash = (53 * hash) + getComparator().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter 
parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getComparatorFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - bitField0_ = (bitField0_ & ~0x00000001); - if (comparatorBuilder_ == null) { - comparator_ = 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } else { - comparatorBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.compareOp_ = compareOp_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (comparatorBuilder_ == null) { - result.comparator_ = comparator_; - } else { - result.comparator_ = comparatorBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this; - if (other.hasCompareOp()) { - setCompareOp(other.getCompareOp()); - } - if (other.hasComparator()) { - mergeComparator(other.getComparator()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareOp()) { - - return false; - } - if (hasComparator()) { - if (!getComparator().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - 
default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - compareOp_ = value; - } - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); - if (hasComparator()) { - subBuilder.mergeFrom(getComparator()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparator(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .CompareType compareOp = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - public boolean hasCompareOp() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; - } - public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - compareOp_ = value; - onChanged(); - return this; - } - public Builder clearCompareOp() { - bitField0_ = (bitField0_ & ~0x00000001); - compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - onChanged(); - return this; - } - - // optional .Comparator comparator = 2; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { - if (comparatorBuilder_ == null) { - return comparator_; - } else { - return comparatorBuilder_.getMessage(); - } - } - public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { - if (comparatorBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - comparator_ = value; - onChanged(); - } else { - comparatorBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setComparator( - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { - if (comparatorBuilder_ == null) { - comparator_ = builderForValue.build(); - onChanged(); - } else { - comparatorBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { - if (comparatorBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 
0x00000002) && - comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { - comparator_ = - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); - } else { - comparator_ = value; - } - onChanged(); - } else { - comparatorBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearComparator() { - if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - onChanged(); - } else { - comparatorBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getComparatorFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - if (comparatorBuilder_ != null) { - return comparatorBuilder_.getMessageOrBuilder(); - } else { - return comparator_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> - getComparatorFieldBuilder() { - if (comparatorBuilder_ == null) { - comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( - comparator_, - getParentForChildren(), - isClean()); - comparator_ = null; - } - return comparatorBuilder_; - } - - // @@protoc_insertion_point(builder_scope:CompareFilter) - } - - static { - defaultInstance = new CompareFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CompareFilter) - } - - public interface DependentColumnFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .CompareFilter compareFilter = 1; - boolean hasCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - - // optional bytes columnFamily = 2; - boolean hasColumnFamily(); - com.google.protobuf.ByteString getColumnFamily(); - - // optional bytes columnQualifier = 3; - boolean hasColumnQualifier(); - com.google.protobuf.ByteString getColumnQualifier(); - - // optional bool dropDependentColumn = 4; - boolean hasDropDependentColumn(); - boolean getDropDependentColumn(); - } - public static final class DependentColumnFilter extends - com.google.protobuf.GeneratedMessage - implements DependentColumnFilterOrBuilder { - // Use DependentColumnFilter.newBuilder() to construct. 
- private DependentColumnFilter(Builder builder) { - super(builder); - } - private DependentColumnFilter(boolean noInit) {} - - private static final DependentColumnFilter defaultInstance; - public static DependentColumnFilter getDefaultInstance() { - return defaultInstance; - } - - public DependentColumnFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .CompareFilter compareFilter = 1; - public static final int COMPAREFILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; - } - - // optional bytes columnFamily = 2; - public static final int COLUMNFAMILY_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString columnFamily_; - public boolean hasColumnFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getColumnFamily() { - return columnFamily_; - } - - // optional bytes columnQualifier = 3; - public static final int COLUMNQUALIFIER_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString columnQualifier_; - public boolean hasColumnQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getColumnQualifier() { - return columnQualifier_; - } - - // optional bool dropDependentColumn = 4; - public static final int DROPDEPENDENTCOLUMN_FIELD_NUMBER = 4; - private boolean dropDependentColumn_; - public boolean hasDropDependentColumn() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getDropDependentColumn() { - return dropDependentColumn_; - } - - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - dropDependentColumn_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCompareFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, columnFamily_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, columnQualifier_); - } - if 
(((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBool(4, dropDependentColumn_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, columnFamily_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, columnQualifier_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, dropDependentColumn_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj; - - boolean result = true; - result = result && (hasCompareFilter() == other.hasCompareFilter()); - if (hasCompareFilter()) { - result = result && getCompareFilter() - .equals(other.getCompareFilter()); - } - result = result && (hasColumnFamily() == other.hasColumnFamily()); - if (hasColumnFamily()) { - result = result && getColumnFamily() - .equals(other.getColumnFamily()); - } - result = result && (hasColumnQualifier() == other.hasColumnQualifier()); - if (hasColumnQualifier()) { - result = result && getColumnQualifier() - .equals(other.getColumnQualifier()); - } - result = result && (hasDropDependentColumn() == other.hasDropDependentColumn()); - if (hasDropDependentColumn()) { - result = result && (getDropDependentColumn() - == other.getDropDependentColumn()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCompareFilter()) { - hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; - hash = (53 * hash) + getCompareFilter().hashCode(); - } - if (hasColumnFamily()) { - hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamily().hashCode(); - } - if (hasColumnQualifier()) { - hash = (37 * hash) + COLUMNQUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getColumnQualifier().hashCode(); - } - if (hasDropDependentColumn()) { - hash = (37 * hash) + DROPDEPENDENTCOLUMN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDropDependentColumn()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getCompareFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - dropDependentColumn_ = false; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (compareFilterBuilder_ == null) { - result.compareFilter_ = compareFilter_; - } else { - result.compareFilter_ = compareFilterBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.columnFamily_ = columnFamily_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.columnQualifier_ = columnQualifier_; - if (((from_bitField0_ & 0x00000008) 
== 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.dropDependentColumn_ = dropDependentColumn_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this; - if (other.hasCompareFilter()) { - mergeCompareFilter(other.getCompareFilter()); - } - if (other.hasColumnFamily()) { - setColumnFamily(other.getColumnFamily()); - } - if (other.hasColumnQualifier()) { - setColumnQualifier(other.getColumnQualifier()); - } - if (other.hasDropDependentColumn()) { - setDropDependentColumn(other.getDropDependentColumn()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareFilter()) { - - return false; - } - if (!getCompareFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - columnFamily_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - columnQualifier_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - dropDependentColumn_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .CompareFilter compareFilter = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - if (compareFilterBuilder_ == null) { - return compareFilter_; - } else { - return 
compareFilterBuilder_.getMessage(); - } - } - public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - compareFilter_ = value; - onChanged(); - } else { - compareFilterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setCompareFilter( - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { - if (compareFilterBuilder_ == null) { - compareFilter_ = builderForValue.build(); - onChanged(); - } else { - compareFilterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { - compareFilter_ = - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); - } else { - compareFilter_ = value; - } - onChanged(); - } else { - compareFilterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearCompareFilter() { - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - onChanged(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getCompareFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - if (compareFilterBuilder_ != null) { - return compareFilterBuilder_.getMessageOrBuilder(); - } else { - return compareFilter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> - getCompareFilterFieldBuilder() { - if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, - getParentForChildren(), - isClean()); - compareFilter_ = null; - } - return compareFilterBuilder_; - } - - // optional bytes columnFamily = 2; - private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasColumnFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getColumnFamily() { - return columnFamily_; - } - public Builder setColumnFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - columnFamily_ = value; - onChanged(); - return this; - } - public Builder clearColumnFamily() { - bitField0_ = 
(bitField0_ & ~0x00000002); - columnFamily_ = getDefaultInstance().getColumnFamily(); - onChanged(); - return this; - } - - // optional bytes columnQualifier = 3; - private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasColumnQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getColumnQualifier() { - return columnQualifier_; - } - public Builder setColumnQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - columnQualifier_ = value; - onChanged(); - return this; - } - public Builder clearColumnQualifier() { - bitField0_ = (bitField0_ & ~0x00000004); - columnQualifier_ = getDefaultInstance().getColumnQualifier(); - onChanged(); - return this; - } - - // optional bool dropDependentColumn = 4; - private boolean dropDependentColumn_ ; - public boolean hasDropDependentColumn() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getDropDependentColumn() { - return dropDependentColumn_; - } - public Builder setDropDependentColumn(boolean value) { - bitField0_ |= 0x00000008; - dropDependentColumn_ = value; - onChanged(); - return this; - } - public Builder clearDropDependentColumn() { - bitField0_ = (bitField0_ & ~0x00000008); - dropDependentColumn_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:DependentColumnFilter) - } - - static { - defaultInstance = new DependentColumnFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DependentColumnFilter) - } - - public interface FamilyFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .CompareFilter compareFilter = 1; - boolean hasCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - } - public static final class FamilyFilter extends - com.google.protobuf.GeneratedMessage - implements FamilyFilterOrBuilder { - // Use FamilyFilter.newBuilder() to construct. 
- private FamilyFilter(Builder builder) { - super(builder); - } - private FamilyFilter(boolean noInit) {} - - private static final FamilyFilter defaultInstance; - public static FamilyFilter getDefaultInstance() { - return defaultInstance; - } - - public FamilyFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .CompareFilter compareFilter = 1; - public static final int COMPAREFILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; - } - - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCompareFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj; - - boolean result = true; - result = result && (hasCompareFilter() == other.hasCompareFilter()); - if (hasCompareFilter()) { - result = result && getCompareFilter() - .equals(other.getCompareFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * 
hash) + getDescriptorForType().hashCode(); - if (hasCompareFilter()) { - hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; - hash = (53 * hash) + getCompareFilter().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder 
newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getCompareFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (compareFilterBuilder_ == null) { - result.compareFilter_ = compareFilter_; - } else { - result.compareFilter_ = compareFilterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this; - if (other.hasCompareFilter()) { - mergeCompareFilter(other.getCompareFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareFilter()) { - - return false; - } - if (!getCompareFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .CompareFilter compareFilter = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - if (compareFilterBuilder_ == null) { - return compareFilter_; - } else { - return compareFilterBuilder_.getMessage(); - } - } - public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - compareFilter_ = value; - onChanged(); - } else { - compareFilterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setCompareFilter( - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { - if (compareFilterBuilder_ == null) { - compareFilter_ = builderForValue.build(); - onChanged(); - } else { - compareFilterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 
0x00000001) && - compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { - compareFilter_ = - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); - } else { - compareFilter_ = value; - } - onChanged(); - } else { - compareFilterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearCompareFilter() { - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - onChanged(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getCompareFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - if (compareFilterBuilder_ != null) { - return compareFilterBuilder_.getMessageOrBuilder(); - } else { - return compareFilter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> - getCompareFilterFieldBuilder() { - if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, - getParentForChildren(), - isClean()); - compareFilter_ = null; - } - return compareFilterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:FamilyFilter) - } - - static { - defaultInstance = new FamilyFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FamilyFilter) - } - - public interface FilterListOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .FilterList.Operator operator = 1; - boolean hasOperator(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator(); - - // repeated .Filter filters = 2; - java.util.List - getFiltersList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index); - int getFiltersCount(); - java.util.List - getFiltersOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( - int index); - } - public static final class FilterList extends - com.google.protobuf.GeneratedMessage - implements FilterListOrBuilder { - // Use FilterList.newBuilder() to construct. 
- private FilterList(Builder builder) { - super(builder); - } - private FilterList(boolean noInit) {} - - private static final FilterList defaultInstance; - public static FilterList getDefaultInstance() { - return defaultInstance; - } - - public FilterList getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable; - } - - public enum Operator - implements com.google.protobuf.ProtocolMessageEnum { - MUST_PASS_ALL(0, 1), - MUST_PASS_ONE(1, 2), - ; - - public static final int MUST_PASS_ALL_VALUE = 1; - public static final int MUST_PASS_ONE_VALUE = 2; - - - public final int getNumber() { return value; } - - public static Operator valueOf(int value) { - switch (value) { - case 1: return MUST_PASS_ALL; - case 2: return MUST_PASS_ONE; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Operator findValueByNumber(int number) { - return Operator.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0); - } - - private static final Operator[] VALUES = { - MUST_PASS_ALL, MUST_PASS_ONE, - }; - - public static Operator valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Operator(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:FilterList.Operator) - } - - private int bitField0_; - // required .FilterList.Operator operator = 1; - public static final int OPERATOR_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_; - public boolean hasOperator() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { - return operator_; - } - - // repeated .Filter filters = 2; - public static final int FILTERS_FIELD_NUMBER = 2; - private java.util.List filters_; - public java.util.List getFiltersList() { - return filters_; - } - public java.util.List - getFiltersOrBuilderList() { - return filters_; - } - public int getFiltersCount() { - return filters_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index) { - return filters_.get(index); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( - int index) { - return filters_.get(index); - } - - private void initFields() { - operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; - filters_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasOperator()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getFiltersCount(); i++) { - if (!getFilters(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, operator_.getNumber()); - } - for (int i = 0; i < filters_.size(); i++) { - output.writeMessage(2, filters_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, operator_.getNumber()); - } - for (int i = 0; i < filters_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, filters_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj; - - boolean result = true; - result = result && (hasOperator() == other.hasOperator()); - if (hasOperator()) { - result = result && - (getOperator() == other.getOperator()); - } - result = result && getFiltersList() - .equals(other.getFiltersList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasOperator()) { - hash = (37 * hash) + OPERATOR_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getOperator()); - } - if (getFiltersCount() > 0) { - hash = (37 * hash) + FILTERS_FIELD_NUMBER; - hash = (53 * hash) + getFiltersList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable; - } - - // 
Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFiltersFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; - bitField0_ = (bitField0_ & ~0x00000001); - if (filtersBuilder_ == null) { - filters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - filtersBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.operator_ = operator_; - if (filtersBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - filters_ = java.util.Collections.unmodifiableList(filters_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.filters_ = filters_; - } else { - result.filters_ = filtersBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this; - if (other.hasOperator()) { - setOperator(other.getOperator()); - } - if (filtersBuilder_ == null) { - if (!other.filters_.isEmpty()) { - if (filters_.isEmpty()) { - filters_ = other.filters_; - bitField0_ = 
(bitField0_ & ~0x00000002); - } else { - ensureFiltersIsMutable(); - filters_.addAll(other.filters_); - } - onChanged(); - } - } else { - if (!other.filters_.isEmpty()) { - if (filtersBuilder_.isEmpty()) { - filtersBuilder_.dispose(); - filtersBuilder_ = null; - filters_ = other.filters_; - bitField0_ = (bitField0_ & ~0x00000002); - filtersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getFiltersFieldBuilder() : null; - } else { - filtersBuilder_.addAllMessages(other.filters_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasOperator()) { - - return false; - } - for (int i = 0; i < getFiltersCount(); i++) { - if (!getFilters(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - operator_ = value; - } - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFilters(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .FilterList.Operator operator = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; - public boolean hasOperator() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { - return operator_; - } - public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - operator_ = value; - onChanged(); - return this; - } - public Builder clearOperator() { - bitField0_ = (bitField0_ & ~0x00000001); - operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; - onChanged(); - return this; - } - - // repeated .Filter filters = 2; - private java.util.List filters_ = - java.util.Collections.emptyList(); - private void ensureFiltersIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - filters_ = new java.util.ArrayList(filters_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filtersBuilder_; - - public java.util.List getFiltersList() { - if (filtersBuilder_ == null) { - return java.util.Collections.unmodifiableList(filters_); - } else { - return filtersBuilder_.getMessageList(); - } - } - public int getFiltersCount() { - if (filtersBuilder_ == null) { - return filters_.size(); - } else { - return filtersBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilters(int index) { - if (filtersBuilder_ == null) { - return filters_.get(index); - } else { - return filtersBuilder_.getMessage(index); - } - } - public Builder setFilters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filtersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFiltersIsMutable(); - filters_.set(index, value); - onChanged(); - } else { - filtersBuilder_.setMessage(index, value); - } - return this; - } - public Builder setFilters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filtersBuilder_ == null) { - ensureFiltersIsMutable(); - filters_.set(index, builderForValue.build()); - onChanged(); - } else { - filtersBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filtersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFiltersIsMutable(); - filters_.add(value); - onChanged(); - } else { - filtersBuilder_.addMessage(value); - } - return this; - } - public Builder addFilters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filtersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFiltersIsMutable(); - filters_.add(index, value); - onChanged(); - } else { - filtersBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFilters( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filtersBuilder_ == null) { - ensureFiltersIsMutable(); - filters_.add(builderForValue.build()); - onChanged(); - } else { - filtersBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFilters( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filtersBuilder_ == null) { - ensureFiltersIsMutable(); - filters_.add(index, builderForValue.build()); - onChanged(); - } else { - filtersBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllFilters( - java.lang.Iterable values) { - if (filtersBuilder_ == null) { - ensureFiltersIsMutable(); - super.addAll(values, filters_); - onChanged(); - } else { - filtersBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFilters() { - if (filtersBuilder_ == null) { - filters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - filtersBuilder_.clear(); - } - return this; - } - public Builder removeFilters(int index) { - if (filtersBuilder_ == null) { - ensureFiltersIsMutable(); - filters_.remove(index); - onChanged(); - } else { - filtersBuilder_.remove(index); - } - return this; - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFiltersBuilder( - int index) { - return getFiltersFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFiltersOrBuilder( - int index) { - if (filtersBuilder_ == null) { - return filters_.get(index); } else { - return filtersBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFiltersOrBuilderList() { - if (filtersBuilder_ != null) { - return filtersBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(filters_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder addFiltersBuilder() { - return getFiltersFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder addFiltersBuilder( - int index) { - return getFiltersFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()); - } - public java.util.List - getFiltersBuilderList() { - return getFiltersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> - getFiltersFieldBuilder() { - if (filtersBuilder_ == null) { - filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( - filters_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - filters_ = null; - } - return filtersBuilder_; - } - - // @@protoc_insertion_point(builder_scope:FilterList) - } - - static { - defaultInstance = new FilterList(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FilterList) - } - - public interface FilterWrapperOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .Filter filter = 1; - boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - } - public static final class FilterWrapper extends - com.google.protobuf.GeneratedMessage - implements FilterWrapperOrBuilder { - // Use FilterWrapper.newBuilder() to construct. 
- private FilterWrapper(Builder builder) { - super(builder); - } - private FilterWrapper(boolean noInit) {} - - private static final FilterWrapper defaultInstance; - public static FilterWrapper getDefaultInstance() { - return defaultInstance; - } - - public FilterWrapper getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable; - } - - private int bitField0_; - // required .Filter filter = 1; - public static final int FILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; - } - - private void initFields() { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, filter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, filter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj; - - boolean result = true; - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); 
- } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder 
extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this; - if (other.hasFilter()) { - mergeFilter(other.getFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFilter()) { - - return false; - } - if (!getFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .Filter filter = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filterBuilder_ == null) { - filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - 
onChanged(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:FilterWrapper) - } - - static { - defaultInstance = new FilterWrapper(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FilterWrapper) - } - - public interface FirstKeyOnlyFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class FirstKeyOnlyFilter extends - com.google.protobuf.GeneratedMessage - implements FirstKeyOnlyFilterOrBuilder { - // Use FirstKeyOnlyFilter.newBuilder() to construct. - private FirstKeyOnlyFilter(Builder builder) { - super(builder); - } - private FirstKeyOnlyFilter(boolean noInit) {} - - private static final FirstKeyOnlyFilter defaultInstance; - public static FirstKeyOnlyFilter getDefaultInstance() { - return defaultInstance; - } - - public FirstKeyOnlyFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - 
.buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other); - } else { - super.mergeFrom(other); - return 
this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:FirstKeyOnlyFilter) - } - - static { - defaultInstance = new FirstKeyOnlyFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FirstKeyOnlyFilter) - } - - public interface FirstKeyValueMatchingQualifiersFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes qualifiers = 1; - java.util.List getQualifiersList(); - int getQualifiersCount(); - com.google.protobuf.ByteString getQualifiers(int index); - } - public static final class FirstKeyValueMatchingQualifiersFilter extends - com.google.protobuf.GeneratedMessage - implements FirstKeyValueMatchingQualifiersFilterOrBuilder { - // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct. 
- private FirstKeyValueMatchingQualifiersFilter(Builder builder) { - super(builder); - } - private FirstKeyValueMatchingQualifiersFilter(boolean noInit) {} - - private static final FirstKeyValueMatchingQualifiersFilter defaultInstance; - public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { - return defaultInstance; - } - - public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; - } - - // repeated bytes qualifiers = 1; - public static final int QUALIFIERS_FIELD_NUMBER = 1; - private java.util.List qualifiers_; - public java.util.List - getQualifiersList() { - return qualifiers_; - } - public int getQualifiersCount() { - return qualifiers_.size(); - } - public com.google.protobuf.ByteString getQualifiers(int index) { - return qualifiers_.get(index); - } - - private void initFields() { - qualifiers_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < qualifiers_.size(); i++) { - output.writeBytes(1, qualifiers_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < qualifiers_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(qualifiers_.get(i)); - } - size += dataSize; - size += 1 * getQualifiersList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj; - - boolean result = true; - result = result && getQualifiersList() - .equals(other.getQualifiersList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getQualifiersCount() > 0) { - hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER; - hash = (53 * hash) + getQualifiersList().hashCode(); - } - hash = (29 * hash) + 
getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return 
newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - qualifiers_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.qualifiers_ = qualifiers_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if 
(other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this; - if (!other.qualifiers_.isEmpty()) { - if (qualifiers_.isEmpty()) { - qualifiers_ = other.qualifiers_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureQualifiersIsMutable(); - qualifiers_.addAll(other.qualifiers_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureQualifiersIsMutable(); - qualifiers_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes qualifiers = 1; - private java.util.List qualifiers_ = java.util.Collections.emptyList();; - private void ensureQualifiersIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - qualifiers_ = new java.util.ArrayList(qualifiers_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getQualifiersList() { - return java.util.Collections.unmodifiableList(qualifiers_); - } - public int getQualifiersCount() { - return qualifiers_.size(); - } - public com.google.protobuf.ByteString getQualifiers(int index) { - return qualifiers_.get(index); - } - public Builder setQualifiers( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifiersIsMutable(); - qualifiers_.set(index, value); - onChanged(); - return this; - } - public Builder addQualifiers(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifiersIsMutable(); - qualifiers_.add(value); - onChanged(); - return this; - } - public Builder addAllQualifiers( - java.lang.Iterable values) { - ensureQualifiersIsMutable(); - super.addAll(values, qualifiers_); - onChanged(); - return this; - } - public Builder clearQualifiers() { - qualifiers_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:FirstKeyValueMatchingQualifiersFilter) - } - - static { - defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FirstKeyValueMatchingQualifiersFilter) - } - - public interface FuzzyRowFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .BytesBytesPair 
fuzzyKeysData = 1; - java.util.List - getFuzzyKeysDataList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index); - int getFuzzyKeysDataCount(); - java.util.List - getFuzzyKeysDataOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( - int index); - } - public static final class FuzzyRowFilter extends - com.google.protobuf.GeneratedMessage - implements FuzzyRowFilterOrBuilder { - // Use FuzzyRowFilter.newBuilder() to construct. - private FuzzyRowFilter(Builder builder) { - super(builder); - } - private FuzzyRowFilter(boolean noInit) {} - - private static final FuzzyRowFilter defaultInstance; - public static FuzzyRowFilter getDefaultInstance() { - return defaultInstance; - } - - public FuzzyRowFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable; - } - - // repeated .BytesBytesPair fuzzyKeysData = 1; - public static final int FUZZYKEYSDATA_FIELD_NUMBER = 1; - private java.util.List fuzzyKeysData_; - public java.util.List getFuzzyKeysDataList() { - return fuzzyKeysData_; - } - public java.util.List - getFuzzyKeysDataOrBuilderList() { - return fuzzyKeysData_; - } - public int getFuzzyKeysDataCount() { - return fuzzyKeysData_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { - return fuzzyKeysData_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( - int index) { - return fuzzyKeysData_.get(index); - } - - private void initFields() { - fuzzyKeysData_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getFuzzyKeysDataCount(); i++) { - if (!getFuzzyKeysData(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < fuzzyKeysData_.size(); i++) { - output.writeMessage(1, fuzzyKeysData_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < fuzzyKeysData_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, fuzzyKeysData_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj; - - boolean result = true; - result = result && getFuzzyKeysDataList() - .equals(other.getFuzzyKeysDataList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getFuzzyKeysDataCount() > 0) { - hash = (37 * hash) + FUZZYKEYSDATA_FIELD_NUMBER; - hash = (53 * hash) + getFuzzyKeysDataList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( - com.google.protobuf.CodedInputStream 
input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFuzzyKeysDataFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (fuzzyKeysDataBuilder_ == null) { - fuzzyKeysData_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - fuzzyKeysDataBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this); - int from_bitField0_ = 
bitField0_; - if (fuzzyKeysDataBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.fuzzyKeysData_ = fuzzyKeysData_; - } else { - result.fuzzyKeysData_ = fuzzyKeysDataBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this; - if (fuzzyKeysDataBuilder_ == null) { - if (!other.fuzzyKeysData_.isEmpty()) { - if (fuzzyKeysData_.isEmpty()) { - fuzzyKeysData_ = other.fuzzyKeysData_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.addAll(other.fuzzyKeysData_); - } - onChanged(); - } - } else { - if (!other.fuzzyKeysData_.isEmpty()) { - if (fuzzyKeysDataBuilder_.isEmpty()) { - fuzzyKeysDataBuilder_.dispose(); - fuzzyKeysDataBuilder_ = null; - fuzzyKeysData_ = other.fuzzyKeysData_; - bitField0_ = (bitField0_ & ~0x00000001); - fuzzyKeysDataBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getFuzzyKeysDataFieldBuilder() : null; - } else { - fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getFuzzyKeysDataCount(); i++) { - if (!getFuzzyKeysData(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFuzzyKeysData(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .BytesBytesPair fuzzyKeysData = 1; - private java.util.List fuzzyKeysData_ = - java.util.Collections.emptyList(); - private void ensureFuzzyKeysDataIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - fuzzyKeysData_ = new java.util.ArrayList(fuzzyKeysData_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_; - - public java.util.List getFuzzyKeysDataList() { - if (fuzzyKeysDataBuilder_ == null) { - return java.util.Collections.unmodifiableList(fuzzyKeysData_); - } else { - return fuzzyKeysDataBuilder_.getMessageList(); - } - } - public int getFuzzyKeysDataCount() { - if (fuzzyKeysDataBuilder_ == null) { - return fuzzyKeysData_.size(); - } else { - return fuzzyKeysDataBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { - if (fuzzyKeysDataBuilder_ == null) { - return fuzzyKeysData_.get(index); - } else { - return fuzzyKeysDataBuilder_.getMessage(index); - } - } - public Builder setFuzzyKeysData( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { - if (fuzzyKeysDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.set(index, value); - onChanged(); - } else { - fuzzyKeysDataBuilder_.setMessage(index, value); - } - return this; - } - public Builder setFuzzyKeysData( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { - if (fuzzyKeysDataBuilder_ == null) { - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.set(index, builderForValue.build()); - onChanged(); - } else { - fuzzyKeysDataBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { - if (fuzzyKeysDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.add(value); - onChanged(); - } else { - fuzzyKeysDataBuilder_.addMessage(value); - } - return this; - } - public Builder addFuzzyKeysData( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { - if (fuzzyKeysDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.add(index, value); - onChanged(); - } else { - fuzzyKeysDataBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFuzzyKeysData( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { - if (fuzzyKeysDataBuilder_ == null) { - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.add(builderForValue.build()); - onChanged(); - } else { - fuzzyKeysDataBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFuzzyKeysData( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { - if (fuzzyKeysDataBuilder_ == null) { - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.add(index, builderForValue.build()); - onChanged(); - } else { - fuzzyKeysDataBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllFuzzyKeysData( - java.lang.Iterable values) { - if (fuzzyKeysDataBuilder_ == null) { - ensureFuzzyKeysDataIsMutable(); - super.addAll(values, fuzzyKeysData_); - onChanged(); - } else { - fuzzyKeysDataBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFuzzyKeysData() { - if (fuzzyKeysDataBuilder_ == null) { - fuzzyKeysData_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - 
fuzzyKeysDataBuilder_.clear(); - } - return this; - } - public Builder removeFuzzyKeysData(int index) { - if (fuzzyKeysDataBuilder_ == null) { - ensureFuzzyKeysDataIsMutable(); - fuzzyKeysData_.remove(index); - onChanged(); - } else { - fuzzyKeysDataBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder( - int index) { - return getFuzzyKeysDataFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( - int index) { - if (fuzzyKeysDataBuilder_ == null) { - return fuzzyKeysData_.get(index); } else { - return fuzzyKeysDataBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFuzzyKeysDataOrBuilderList() { - if (fuzzyKeysDataBuilder_ != null) { - return fuzzyKeysDataBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(fuzzyKeysData_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() { - return getFuzzyKeysDataFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder( - int index) { - return getFuzzyKeysDataFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); - } - public java.util.List - getFuzzyKeysDataBuilderList() { - return getFuzzyKeysDataFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> - getFuzzyKeysDataFieldBuilder() { - if (fuzzyKeysDataBuilder_ == null) { - fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( - fuzzyKeysData_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - fuzzyKeysData_ = null; - } - return fuzzyKeysDataBuilder_; - } - - // @@protoc_insertion_point(builder_scope:FuzzyRowFilter) - } - - static { - defaultInstance = new FuzzyRowFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FuzzyRowFilter) - } - - public interface InclusiveStopFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes stopRowKey = 1; - boolean hasStopRowKey(); - com.google.protobuf.ByteString getStopRowKey(); - } - public static final class InclusiveStopFilter extends - com.google.protobuf.GeneratedMessage - implements InclusiveStopFilterOrBuilder { - // Use InclusiveStopFilter.newBuilder() to construct. 
- private InclusiveStopFilter(Builder builder) { - super(builder); - } - private InclusiveStopFilter(boolean noInit) {} - - private static final InclusiveStopFilter defaultInstance; - public static InclusiveStopFilter getDefaultInstance() { - return defaultInstance; - } - - public InclusiveStopFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes stopRowKey = 1; - public static final int STOPROWKEY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString stopRowKey_; - public boolean hasStopRowKey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getStopRowKey() { - return stopRowKey_; - } - - private void initFields() { - stopRowKey_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, stopRowKey_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, stopRowKey_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj; - - boolean result = true; - result = result && (hasStopRowKey() == other.hasStopRowKey()); - if (hasStopRowKey()) { - result = result && getStopRowKey() - .equals(other.getStopRowKey()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasStopRowKey()) { - hash = (37 * hash) + STOPROWKEY_FIELD_NUMBER; - hash = (53 * hash) + getStopRowKey().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder { - public 
static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - stopRowKey_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.stopRowKey_ = stopRowKey_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this; - if (other.hasStopRowKey()) { - setStopRowKey(other.getStopRowKey()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean 
isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - stopRowKey_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // optional bytes stopRowKey = 1; - private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasStopRowKey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getStopRowKey() { - return stopRowKey_; - } - public Builder setStopRowKey(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - stopRowKey_ = value; - onChanged(); - return this; - } - public Builder clearStopRowKey() { - bitField0_ = (bitField0_ & ~0x00000001); - stopRowKey_ = getDefaultInstance().getStopRowKey(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:InclusiveStopFilter) - } - - static { - defaultInstance = new InclusiveStopFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:InclusiveStopFilter) - } - - public interface KeyOnlyFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool lenAsVal = 1; - boolean hasLenAsVal(); - boolean getLenAsVal(); - } - public static final class KeyOnlyFilter extends - com.google.protobuf.GeneratedMessage - implements KeyOnlyFilterOrBuilder { - // Use KeyOnlyFilter.newBuilder() to construct. 
- private KeyOnlyFilter(Builder builder) { - super(builder); - } - private KeyOnlyFilter(boolean noInit) {} - - private static final KeyOnlyFilter defaultInstance; - public static KeyOnlyFilter getDefaultInstance() { - return defaultInstance; - } - - public KeyOnlyFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable; - } - - private int bitField0_; - // required bool lenAsVal = 1; - public static final int LENASVAL_FIELD_NUMBER = 1; - private boolean lenAsVal_; - public boolean hasLenAsVal() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getLenAsVal() { - return lenAsVal_; - } - - private void initFields() { - lenAsVal_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLenAsVal()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, lenAsVal_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, lenAsVal_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj; - - boolean result = true; - result = result && (hasLenAsVal() == other.hasLenAsVal()); - if (hasLenAsVal()) { - result = result && (getLenAsVal() - == other.getLenAsVal()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLenAsVal()) { - hash = (37 * hash) + LENASVAL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLenAsVal()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; - } - - 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lenAsVal_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lenAsVal_ = lenAsVal_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this; - if (other.hasLenAsVal()) { - setLenAsVal(other.getLenAsVal()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLenAsVal()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lenAsVal_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool lenAsVal = 1; - private boolean lenAsVal_ ; - public boolean hasLenAsVal() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getLenAsVal() { - return lenAsVal_; - } - public Builder setLenAsVal(boolean value) { - bitField0_ |= 0x00000001; - lenAsVal_ = value; - onChanged(); - return this; - } - public Builder clearLenAsVal() { - bitField0_ = (bitField0_ & ~0x00000001); - lenAsVal_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:KeyOnlyFilter) - } - - static { - defaultInstance = new KeyOnlyFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:KeyOnlyFilter) - } - - public interface MultipleColumnPrefixFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes sortedPrefixes = 1; - java.util.List getSortedPrefixesList(); - int getSortedPrefixesCount(); - com.google.protobuf.ByteString getSortedPrefixes(int index); - } - public static final class MultipleColumnPrefixFilter extends - com.google.protobuf.GeneratedMessage - implements MultipleColumnPrefixFilterOrBuilder { - // Use MultipleColumnPrefixFilter.newBuilder() to construct. - private MultipleColumnPrefixFilter(Builder builder) { - super(builder); - } - private MultipleColumnPrefixFilter(boolean noInit) {} - - private static final MultipleColumnPrefixFilter defaultInstance; - public static MultipleColumnPrefixFilter getDefaultInstance() { - return defaultInstance; - } - - public MultipleColumnPrefixFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; - } - - // repeated bytes sortedPrefixes = 1; - public static final int SORTEDPREFIXES_FIELD_NUMBER = 1; - private java.util.List sortedPrefixes_; - public java.util.List - getSortedPrefixesList() { - return sortedPrefixes_; - } - public int getSortedPrefixesCount() { - return sortedPrefixes_.size(); - } - public com.google.protobuf.ByteString getSortedPrefixes(int index) { - return sortedPrefixes_.get(index); - } - - private void initFields() { - sortedPrefixes_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < sortedPrefixes_.size(); i++) { - output.writeBytes(1, sortedPrefixes_.get(i)); - } - 
getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < sortedPrefixes_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(sortedPrefixes_.get(i)); - } - size += dataSize; - size += 1 * getSortedPrefixesList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj; - - boolean result = true; - result = result && getSortedPrefixesList() - .equals(other.getSortedPrefixesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getSortedPrefixesCount() > 0) { - hash = (37 * hash) + SORTEDPREFIXES_FIELD_NUMBER; - hash = (53 * hash) + getSortedPrefixesList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - sortedPrefixes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.sortedPrefixes_ = sortedPrefixes_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance()) return this; - if (!other.sortedPrefixes_.isEmpty()) { - if (sortedPrefixes_.isEmpty()) { - sortedPrefixes_ = other.sortedPrefixes_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureSortedPrefixesIsMutable(); - sortedPrefixes_.addAll(other.sortedPrefixes_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureSortedPrefixesIsMutable(); - sortedPrefixes_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes sortedPrefixes = 1; - private java.util.List sortedPrefixes_ = java.util.Collections.emptyList();; - private void ensureSortedPrefixesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - sortedPrefixes_ = new java.util.ArrayList(sortedPrefixes_); - bitField0_ |= 
0x00000001; - } - } - public java.util.List - getSortedPrefixesList() { - return java.util.Collections.unmodifiableList(sortedPrefixes_); - } - public int getSortedPrefixesCount() { - return sortedPrefixes_.size(); - } - public com.google.protobuf.ByteString getSortedPrefixes(int index) { - return sortedPrefixes_.get(index); - } - public Builder setSortedPrefixes( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureSortedPrefixesIsMutable(); - sortedPrefixes_.set(index, value); - onChanged(); - return this; - } - public Builder addSortedPrefixes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureSortedPrefixesIsMutable(); - sortedPrefixes_.add(value); - onChanged(); - return this; - } - public Builder addAllSortedPrefixes( - java.lang.Iterable values) { - ensureSortedPrefixesIsMutable(); - super.addAll(values, sortedPrefixes_); - onChanged(); - return this; - } - public Builder clearSortedPrefixes() { - sortedPrefixes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:MultipleColumnPrefixFilter) - } - - static { - defaultInstance = new MultipleColumnPrefixFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultipleColumnPrefixFilter) - } - - public interface PageFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int64 pageSize = 1; - boolean hasPageSize(); - long getPageSize(); - } - public static final class PageFilter extends - com.google.protobuf.GeneratedMessage - implements PageFilterOrBuilder { - // Use PageFilter.newBuilder() to construct. 
- private PageFilter(Builder builder) { - super(builder); - } - private PageFilter(boolean noInit) {} - - private static final PageFilter defaultInstance; - public static PageFilter getDefaultInstance() { - return defaultInstance; - } - - public PageFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable; - } - - private int bitField0_; - // required int64 pageSize = 1; - public static final int PAGESIZE_FIELD_NUMBER = 1; - private long pageSize_; - public boolean hasPageSize() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getPageSize() { - return pageSize_; - } - - private void initFields() { - pageSize_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPageSize()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, pageSize_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, pageSize_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) obj; - - boolean result = true; - result = result && (hasPageSize() == other.hasPageSize()); - if (hasPageSize()) { - result = result && (getPageSize() - == other.getPageSize()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPageSize()) { - hash = (37 * hash) + PAGESIZE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getPageSize()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - pageSize_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.pageSize_ = pageSize_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance()) return this; - if (other.hasPageSize()) { - setPageSize(other.getPageSize()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPageSize()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while 
(true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - pageSize_ = input.readInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required int64 pageSize = 1; - private long pageSize_ ; - public boolean hasPageSize() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getPageSize() { - return pageSize_; - } - public Builder setPageSize(long value) { - bitField0_ |= 0x00000001; - pageSize_ = value; - onChanged(); - return this; - } - public Builder clearPageSize() { - bitField0_ = (bitField0_ & ~0x00000001); - pageSize_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:PageFilter) - } - - static { - defaultInstance = new PageFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:PageFilter) - } - - public interface PrefixFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes prefix = 1; - boolean hasPrefix(); - com.google.protobuf.ByteString getPrefix(); - } - public static final class PrefixFilter extends - com.google.protobuf.GeneratedMessage - implements PrefixFilterOrBuilder { - // Use PrefixFilter.newBuilder() to construct. - private PrefixFilter(Builder builder) { - super(builder); - } - private PrefixFilter(boolean noInit) {} - - private static final PrefixFilter defaultInstance; - public static PrefixFilter getDefaultInstance() { - return defaultInstance; - } - - public PrefixFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes prefix = 1; - public static final int PREFIX_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString prefix_; - public boolean hasPrefix() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getPrefix() { - return prefix_; - } - - private void initFields() { - prefix_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, prefix_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, prefix_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - 
private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) obj; - - boolean result = true; - result = result && (hasPrefix() == other.hasPrefix()); - if (hasPrefix()) { - result = result && getPrefix() - .equals(other.getPrefix()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPrefix()) { - hash = (37 * hash) + PREFIX_FIELD_NUMBER; - hash = (53 * hash) + getPrefix().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - prefix_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter 
buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.prefix_ = prefix_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance()) return this; - if (other.hasPrefix()) { - setPrefix(other.getPrefix()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - prefix_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // optional bytes prefix = 1; - private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasPrefix() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getPrefix() { - return prefix_; - } - public Builder setPrefix(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - prefix_ = value; - onChanged(); - return this; - } - public Builder clearPrefix() { - bitField0_ = (bitField0_ & ~0x00000001); - prefix_ = getDefaultInstance().getPrefix(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:PrefixFilter) - } - - static { - defaultInstance = new PrefixFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:PrefixFilter) - } - - public interface QualifierFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .CompareFilter compareFilter = 1; - boolean hasCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - } - public static final class QualifierFilter extends - com.google.protobuf.GeneratedMessage - implements QualifierFilterOrBuilder { - // Use QualifierFilter.newBuilder() to construct. 
- private QualifierFilter(Builder builder) { - super(builder); - } - private QualifierFilter(boolean noInit) {} - - private static final QualifierFilter defaultInstance; - public static QualifierFilter getDefaultInstance() { - return defaultInstance; - } - - public QualifierFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .CompareFilter compareFilter = 1; - public static final int COMPAREFILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; - } - - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCompareFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) obj; - - boolean result = true; - result = result && (hasCompareFilter() == other.hasCompareFilter()); - if (hasCompareFilter()) { - result = result && getCompareFilter() - .equals(other.getCompareFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - 
int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCompareFilter()) { - hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; - hash = (53 * hash) + getCompareFilter().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return 
newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getCompareFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (compareFilterBuilder_ == null) { - result.compareFilter_ = compareFilter_; - } else { - result.compareFilter_ = compareFilterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance()) return this; - if (other.hasCompareFilter()) { - mergeCompareFilter(other.getCompareFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareFilter()) { - - return false; - } - if (!getCompareFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .CompareFilter compareFilter = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - if (compareFilterBuilder_ == null) { - return compareFilter_; - } else { - return compareFilterBuilder_.getMessage(); - } - } - public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - compareFilter_ = value; - onChanged(); - } else { - compareFilterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setCompareFilter( - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { - if (compareFilterBuilder_ == null) { - compareFilter_ = builderForValue.build(); - onChanged(); - } else { - compareFilterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder 
mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { - compareFilter_ = - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); - } else { - compareFilter_ = value; - } - onChanged(); - } else { - compareFilterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearCompareFilter() { - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - onChanged(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getCompareFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - if (compareFilterBuilder_ != null) { - return compareFilterBuilder_.getMessageOrBuilder(); - } else { - return compareFilter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> - getCompareFilterFieldBuilder() { - if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, - getParentForChildren(), - isClean()); - compareFilter_ = null; - } - return compareFilterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:QualifierFilter) - } - - static { - defaultInstance = new QualifierFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:QualifierFilter) - } - - public interface RandomRowFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required float chance = 1; - boolean hasChance(); - float getChance(); - } - public static final class RandomRowFilter extends - com.google.protobuf.GeneratedMessage - implements RandomRowFilterOrBuilder { - // Use RandomRowFilter.newBuilder() to construct. 
- private RandomRowFilter(Builder builder) { - super(builder); - } - private RandomRowFilter(boolean noInit) {} - - private static final RandomRowFilter defaultInstance; - public static RandomRowFilter getDefaultInstance() { - return defaultInstance; - } - - public RandomRowFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable; - } - - private int bitField0_; - // required float chance = 1; - public static final int CHANCE_FIELD_NUMBER = 1; - private float chance_; - public boolean hasChance() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public float getChance() { - return chance_; - } - - private void initFields() { - chance_ = 0F; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasChance()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeFloat(1, chance_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeFloatSize(1, chance_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) obj; - - boolean result = true; - result = result && (hasChance() == other.hasChance()); - if (hasChance()) { - result = result && (Float.floatToIntBits(getChance()) == Float.floatToIntBits(other.getChance())); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasChance()) { - hash = (37 * hash) + CHANCE_FIELD_NUMBER; - hash = (53 * hash) + Float.floatToIntBits( - getChance()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - 
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - chance_ = 0F; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.chance_ = chance_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance()) return this; - if (other.hasChance()) { - setChance(other.getChance()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasChance()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 13: { - bitField0_ |= 0x00000001; - chance_ = input.readFloat(); - break; - } - } - } - } - - private int bitField0_; - - // required float chance = 1; - private float chance_ ; - public boolean hasChance() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public float getChance() { - return chance_; - } - public Builder setChance(float value) { - bitField0_ |= 0x00000001; - chance_ = value; - onChanged(); - return this; - } - public Builder clearChance() { - bitField0_ = (bitField0_ & ~0x00000001); - chance_ = 0F; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RandomRowFilter) - } - - static { - defaultInstance = new RandomRowFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RandomRowFilter) - } - - public interface RowFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .CompareFilter compareFilter = 1; - boolean hasCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - } - public static final class RowFilter extends - com.google.protobuf.GeneratedMessage - implements RowFilterOrBuilder { - // Use RowFilter.newBuilder() to construct. 
- private RowFilter(Builder builder) { - super(builder); - } - private RowFilter(boolean noInit) {} - - private static final RowFilter defaultInstance; - public static RowFilter getDefaultInstance() { - return defaultInstance; - } - - public RowFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .CompareFilter compareFilter = 1; - public static final int COMPAREFILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; - } - - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCompareFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) obj; - - boolean result = true; - result = result && (hasCompareFilter() == other.hasCompareFilter()); - if (hasCompareFilter()) { - result = result && getCompareFilter() - .equals(other.getCompareFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + 
getDescriptorForType().hashCode(); - if (hasCompareFilter()) { - hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; - hash = (53 * hash) + getCompareFilter().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getCompareFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (compareFilterBuilder_ == null) { - result.compareFilter_ = compareFilter_; - } else { - result.compareFilter_ = compareFilterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)other); - } else { -
super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance()) return this; - if (other.hasCompareFilter()) { - mergeCompareFilter(other.getCompareFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareFilter()) { - - return false; - } - if (!getCompareFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .CompareFilter compareFilter = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - if (compareFilterBuilder_ == null) { - return compareFilter_; - } else { - return compareFilterBuilder_.getMessage(); - } - } - public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - compareFilter_ = value; - onChanged(); - } else { - compareFilterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setCompareFilter( - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { - if (compareFilterBuilder_ == null) { - compareFilter_ = builderForValue.build(); - onChanged(); - } else { - compareFilterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - compareFilter_ != 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { - compareFilter_ = - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); - } else { - compareFilter_ = value; - } - onChanged(); - } else { - compareFilterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearCompareFilter() { - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - onChanged(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getCompareFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - if (compareFilterBuilder_ != null) { - return compareFilterBuilder_.getMessageOrBuilder(); - } else { - return compareFilter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> - getCompareFilterFieldBuilder() { - if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, - getParentForChildren(), - isClean()); - compareFilter_ = null; - } - return compareFilterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RowFilter) - } - - static { - defaultInstance = new RowFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RowFilter) - } - - public interface SingleColumnValueExcludeFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .SingleColumnValueFilter singleColumnValueFilter = 1; - boolean hasSingleColumnValueFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder(); - } - public static final class SingleColumnValueExcludeFilter extends - com.google.protobuf.GeneratedMessage - implements SingleColumnValueExcludeFilterOrBuilder { - // Use SingleColumnValueExcludeFilter.newBuilder() to construct. 
- private SingleColumnValueExcludeFilter(Builder builder) { - super(builder); - } - private SingleColumnValueExcludeFilter(boolean noInit) {} - - private static final SingleColumnValueExcludeFilter defaultInstance; - public static SingleColumnValueExcludeFilter getDefaultInstance() { - return defaultInstance; - } - - public SingleColumnValueExcludeFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .SingleColumnValueFilter singleColumnValueFilter = 1; - public static final int SINGLECOLUMNVALUEFILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_; - public boolean hasSingleColumnValueFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { - return singleColumnValueFilter_; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { - return singleColumnValueFilter_; - } - - private void initFields() { - singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasSingleColumnValueFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getSingleColumnValueFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, singleColumnValueFilter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, singleColumnValueFilter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) obj; - - boolean result = 
true; - result = result && (hasSingleColumnValueFilter() == other.hasSingleColumnValueFilter()); - if (hasSingleColumnValueFilter()) { - result = result && getSingleColumnValueFilter() - .equals(other.getSingleColumnValueFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSingleColumnValueFilter()) { - hash = (37 * hash) + SINGLECOLUMNVALUEFILTER_FIELD_NUMBER; - hash = (53 * hash) + getSingleColumnValueFilter().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getSingleColumnValueFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - } else { - singleColumnValueFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( -
result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (singleColumnValueFilterBuilder_ == null) { - result.singleColumnValueFilter_ = singleColumnValueFilter_; - } else { - result.singleColumnValueFilter_ = singleColumnValueFilterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance()) return this; - if (other.hasSingleColumnValueFilter()) { - mergeSingleColumnValueFilter(other.getSingleColumnValueFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasSingleColumnValueFilter()) { - - return false; - } - if (!getSingleColumnValueFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(); - if (hasSingleColumnValueFilter()) { - subBuilder.mergeFrom(getSingleColumnValueFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setSingleColumnValueFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .SingleColumnValueFilter singleColumnValueFilter = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> 
singleColumnValueFilterBuilder_; - public boolean hasSingleColumnValueFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { - if (singleColumnValueFilterBuilder_ == null) { - return singleColumnValueFilter_; - } else { - return singleColumnValueFilterBuilder_.getMessage(); - } - } - public Builder setSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { - if (singleColumnValueFilterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - singleColumnValueFilter_ = value; - onChanged(); - } else { - singleColumnValueFilterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setSingleColumnValueFilter( - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder builderForValue) { - if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilter_ = builderForValue.build(); - onChanged(); - } else { - singleColumnValueFilterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { - if (singleColumnValueFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - singleColumnValueFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) { - singleColumnValueFilter_ = - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(singleColumnValueFilter_).mergeFrom(value).buildPartial(); - } else { - singleColumnValueFilter_ = value; - } - onChanged(); - } else { - singleColumnValueFilterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearSingleColumnValueFilter() { - if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - onChanged(); - } else { - singleColumnValueFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder getSingleColumnValueFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getSingleColumnValueFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { - if (singleColumnValueFilterBuilder_ != null) { - return singleColumnValueFilterBuilder_.getMessageOrBuilder(); - } else { - return singleColumnValueFilter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> - getSingleColumnValueFilterFieldBuilder() { - if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>( - singleColumnValueFilter_, - getParentForChildren(), - isClean()); - singleColumnValueFilter_ = null; - } - return singleColumnValueFilterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:SingleColumnValueExcludeFilter) - } - - static { - defaultInstance = new SingleColumnValueExcludeFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SingleColumnValueExcludeFilter) - } - - public interface SingleColumnValueFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes columnFamily = 1; - boolean hasColumnFamily(); - com.google.protobuf.ByteString getColumnFamily(); - - // optional bytes columnQualifier = 2; - boolean hasColumnQualifier(); - com.google.protobuf.ByteString getColumnQualifier(); - - // required .CompareType compareOp = 3; - boolean hasCompareOp(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); - - // required .Comparator comparator = 4; - boolean hasComparator(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); - - // optional bool foundColumn = 5; - boolean hasFoundColumn(); - boolean getFoundColumn(); - - // optional bool matchedColumn = 6; - boolean hasMatchedColumn(); - boolean getMatchedColumn(); - - // optional bool filterIfMissing = 7; - boolean hasFilterIfMissing(); - boolean getFilterIfMissing(); - - // optional bool latestVersionOnly = 8; - boolean hasLatestVersionOnly(); - boolean getLatestVersionOnly(); - } - public static final class SingleColumnValueFilter extends - com.google.protobuf.GeneratedMessage - implements SingleColumnValueFilterOrBuilder { - // Use SingleColumnValueFilter.newBuilder() to construct. 
- private SingleColumnValueFilter(Builder builder) { - super(builder); - } - private SingleColumnValueFilter(boolean noInit) {} - - private static final SingleColumnValueFilter defaultInstance; - public static SingleColumnValueFilter getDefaultInstance() { - return defaultInstance; - } - - public SingleColumnValueFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes columnFamily = 1; - public static final int COLUMNFAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString columnFamily_; - public boolean hasColumnFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getColumnFamily() { - return columnFamily_; - } - - // optional bytes columnQualifier = 2; - public static final int COLUMNQUALIFIER_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString columnQualifier_; - public boolean hasColumnQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getColumnQualifier() { - return columnQualifier_; - } - - // required .CompareType compareOp = 3; - public static final int COMPAREOP_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; - public boolean hasCompareOp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; - } - - // required .Comparator comparator = 4; - public static final int COMPARATOR_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { - return comparator_; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - return comparator_; - } - - // optional bool foundColumn = 5; - public static final int FOUNDCOLUMN_FIELD_NUMBER = 5; - private boolean foundColumn_; - public boolean hasFoundColumn() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getFoundColumn() { - return foundColumn_; - } - - // optional bool matchedColumn = 6; - public static final int MATCHEDCOLUMN_FIELD_NUMBER = 6; - private boolean matchedColumn_; - public boolean hasMatchedColumn() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getMatchedColumn() { - return matchedColumn_; - } - - // optional bool filterIfMissing = 7; - public static final int FILTERIFMISSING_FIELD_NUMBER = 7; - private boolean filterIfMissing_; - public boolean hasFilterIfMissing() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public boolean getFilterIfMissing() { - return filterIfMissing_; - } - - // optional bool latestVersionOnly = 8; - public static final int LATESTVERSIONONLY_FIELD_NUMBER = 8; - private boolean latestVersionOnly_; - public boolean hasLatestVersionOnly() { - 
return ((bitField0_ & 0x00000080) == 0x00000080); - } - public boolean getLatestVersionOnly() { - return latestVersionOnly_; - } - - private void initFields() { - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - foundColumn_ = false; - matchedColumn_ = false; - filterIfMissing_ = false; - latestVersionOnly_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareOp()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasComparator()) { - memoizedIsInitialized = 0; - return false; - } - if (!getComparator().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, columnFamily_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, columnQualifier_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeEnum(3, compareOp_.getNumber()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, comparator_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBool(5, foundColumn_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(6, matchedColumn_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeBool(7, filterIfMissing_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeBool(8, latestVersionOnly_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, columnFamily_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, columnQualifier_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(3, compareOp_.getNumber()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, comparator_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(5, foundColumn_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(6, matchedColumn_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(7, filterIfMissing_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, latestVersionOnly_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) obj; - - boolean result = true; - result = result && (hasColumnFamily() == other.hasColumnFamily()); - if (hasColumnFamily()) { - result = result && getColumnFamily() - .equals(other.getColumnFamily()); - } - result = result && (hasColumnQualifier() == other.hasColumnQualifier()); - if (hasColumnQualifier()) { - result = result && getColumnQualifier() - .equals(other.getColumnQualifier()); - } - result = result && (hasCompareOp() == other.hasCompareOp()); - if (hasCompareOp()) { - result = result && - (getCompareOp() == other.getCompareOp()); - } - result = result && (hasComparator() == other.hasComparator()); - if (hasComparator()) { - result = result && getComparator() - .equals(other.getComparator()); - } - result = result && (hasFoundColumn() == other.hasFoundColumn()); - if (hasFoundColumn()) { - result = result && (getFoundColumn() - == other.getFoundColumn()); - } - result = result && (hasMatchedColumn() == other.hasMatchedColumn()); - if (hasMatchedColumn()) { - result = result && (getMatchedColumn() - == other.getMatchedColumn()); - } - result = result && (hasFilterIfMissing() == other.hasFilterIfMissing()); - if (hasFilterIfMissing()) { - result = result && (getFilterIfMissing() - == other.getFilterIfMissing()); - } - result = result && (hasLatestVersionOnly() == other.hasLatestVersionOnly()); - if (hasLatestVersionOnly()) { - result = result && (getLatestVersionOnly() - == other.getLatestVersionOnly()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasColumnFamily()) { - hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamily().hashCode(); - } - if (hasColumnQualifier()) { - hash = (37 * hash) + COLUMNQUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getColumnQualifier().hashCode(); - } - if (hasCompareOp()) { - hash = (37 * hash) + COMPAREOP_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompareOp()); - } - if (hasComparator()) { - hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; - hash = (53 * hash) + getComparator().hashCode(); - } - if (hasFoundColumn()) { - hash = (37 * hash) + FOUNDCOLUMN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getFoundColumn()); - } - if (hasMatchedColumn()) { - hash = (37 * hash) + MATCHEDCOLUMN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMatchedColumn()); - } - if (hasFilterIfMissing()) { - hash = (37 * hash) + FILTERIFMISSING_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getFilterIfMissing()); - } - if (hasLatestVersionOnly()) { - hash = (37 * hash) + LATESTVERSIONONLY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLatestVersionOnly()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder { - public static final
com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getComparatorFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - bitField0_ = (bitField0_ & ~0x00000004); - if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } else { - comparatorBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000008); - foundColumn_ = false; - bitField0_ = (bitField0_ & ~0x00000010); - matchedColumn_ = false; - bitField0_ = (bitField0_ & ~0x00000020); - filterIfMissing_ = false; - bitField0_ = (bitField0_ & ~0x00000040); - latestVersionOnly_ = false; - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.columnFamily_ = 
columnFamily_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.columnQualifier_ = columnQualifier_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.compareOp_ = compareOp_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - if (comparatorBuilder_ == null) { - result.comparator_ = comparator_; - } else { - result.comparator_ = comparatorBuilder_.build(); - } - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.foundColumn_ = foundColumn_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.matchedColumn_ = matchedColumn_; - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000040; - } - result.filterIfMissing_ = filterIfMissing_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000080; - } - result.latestVersionOnly_ = latestVersionOnly_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) return this; - if (other.hasColumnFamily()) { - setColumnFamily(other.getColumnFamily()); - } - if (other.hasColumnQualifier()) { - setColumnQualifier(other.getColumnQualifier()); - } - if (other.hasCompareOp()) { - setCompareOp(other.getCompareOp()); - } - if (other.hasComparator()) { - mergeComparator(other.getComparator()); - } - if (other.hasFoundColumn()) { - setFoundColumn(other.getFoundColumn()); - } - if (other.hasMatchedColumn()) { - setMatchedColumn(other.getMatchedColumn()); - } - if (other.hasFilterIfMissing()) { - setFilterIfMissing(other.getFilterIfMissing()); - } - if (other.hasLatestVersionOnly()) { - setLatestVersionOnly(other.getLatestVersionOnly()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareOp()) { - - return false; - } - if (!hasComparator()) { - - return false; - } - if (!getComparator().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - columnFamily_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - columnQualifier_ = input.readBytes(); - break; - } - case 24: { - int rawValue = input.readEnum(); - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(3, rawValue); - } else { - bitField0_ |= 0x00000004; - compareOp_ = value; - } - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(); - if (hasComparator()) { - subBuilder.mergeFrom(getComparator()); - } - input.readMessage(subBuilder, extensionRegistry); - setComparator(subBuilder.buildPartial()); - break; - } - case 40: { - bitField0_ |= 0x00000010; - foundColumn_ = input.readBool(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - matchedColumn_ = input.readBool(); - break; - } - case 56: { - bitField0_ |= 0x00000040; - filterIfMissing_ = input.readBool(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - latestVersionOnly_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional bytes columnFamily = 1; - private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasColumnFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getColumnFamily() { - return columnFamily_; - } - public Builder setColumnFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - columnFamily_ = value; - onChanged(); - return this; - } - public Builder clearColumnFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - columnFamily_ = getDefaultInstance().getColumnFamily(); - onChanged(); - return this; - } - - // optional bytes columnQualifier = 2; - private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasColumnQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getColumnQualifier() { - return columnQualifier_; - } - public Builder setColumnQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - columnQualifier_ = value; - onChanged(); - return this; - } - public Builder clearColumnQualifier() { - bitField0_ = (bitField0_ & ~0x00000002); - columnQualifier_ = getDefaultInstance().getColumnQualifier(); - onChanged(); - return this; - } - - // required .CompareType compareOp = 3; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - public boolean hasCompareOp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; - } - public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - compareOp_ = value; - onChanged(); - return this; - } - public Builder clearCompareOp() { - bitField0_ = (bitField0_ & ~0x00000004); - compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; - onChanged(); - return this; - } - - // required .Comparator comparator = 4; - private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { - if (comparatorBuilder_ == null) { - return comparator_; - } else { - return comparatorBuilder_.getMessage(); - } - } - public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { - if (comparatorBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - comparator_ = value; - onChanged(); - } else { - comparatorBuilder_.setMessage(value); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder setComparator( - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { - if (comparatorBuilder_ == null) { - comparator_ = builderForValue.build(); - onChanged(); - } else { - comparatorBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { - if (comparatorBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008) && - comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { - comparator_ = - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); - } else { - comparator_ = value; - } - onChanged(); - } else { - comparatorBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder clearComparator() { - if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - onChanged(); - } else { - comparatorBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { - bitField0_ |= 0x00000008; - onChanged(); - return getComparatorFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - if (comparatorBuilder_ != null) { - return comparatorBuilder_.getMessageOrBuilder(); - } else { - return comparator_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> - getComparatorFieldBuilder() { - if (comparatorBuilder_ == null) { - comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( - comparator_, - getParentForChildren(), - isClean()); - comparator_ = null; - } - return comparatorBuilder_; - } - - // 
optional bool foundColumn = 5; - private boolean foundColumn_ ; - public boolean hasFoundColumn() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getFoundColumn() { - return foundColumn_; - } - public Builder setFoundColumn(boolean value) { - bitField0_ |= 0x00000010; - foundColumn_ = value; - onChanged(); - return this; - } - public Builder clearFoundColumn() { - bitField0_ = (bitField0_ & ~0x00000010); - foundColumn_ = false; - onChanged(); - return this; - } - - // optional bool matchedColumn = 6; - private boolean matchedColumn_ ; - public boolean hasMatchedColumn() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getMatchedColumn() { - return matchedColumn_; - } - public Builder setMatchedColumn(boolean value) { - bitField0_ |= 0x00000020; - matchedColumn_ = value; - onChanged(); - return this; - } - public Builder clearMatchedColumn() { - bitField0_ = (bitField0_ & ~0x00000020); - matchedColumn_ = false; - onChanged(); - return this; - } - - // optional bool filterIfMissing = 7; - private boolean filterIfMissing_ ; - public boolean hasFilterIfMissing() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public boolean getFilterIfMissing() { - return filterIfMissing_; - } - public Builder setFilterIfMissing(boolean value) { - bitField0_ |= 0x00000040; - filterIfMissing_ = value; - onChanged(); - return this; - } - public Builder clearFilterIfMissing() { - bitField0_ = (bitField0_ & ~0x00000040); - filterIfMissing_ = false; - onChanged(); - return this; - } - - // optional bool latestVersionOnly = 8; - private boolean latestVersionOnly_ ; - public boolean hasLatestVersionOnly() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public boolean getLatestVersionOnly() { - return latestVersionOnly_; - } - public Builder setLatestVersionOnly(boolean value) { - bitField0_ |= 0x00000080; - latestVersionOnly_ = value; - onChanged(); - return this; - } - public Builder clearLatestVersionOnly() { - bitField0_ = (bitField0_ & ~0x00000080); - latestVersionOnly_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SingleColumnValueFilter) - } - - static { - defaultInstance = new SingleColumnValueFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SingleColumnValueFilter) - } - - public interface SkipFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .Filter filter = 1; - boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - } - public static final class SkipFilter extends - com.google.protobuf.GeneratedMessage - implements SkipFilterOrBuilder { - // Use SkipFilter.newBuilder() to construct. 
- private SkipFilter(Builder builder) { - super(builder); - } - private SkipFilter(boolean noInit) {} - - private static final SkipFilter defaultInstance; - public static SkipFilter getDefaultInstance() { - return defaultInstance; - } - - public SkipFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .Filter filter = 1; - public static final int FILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; - } - - private void initFields() { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, filter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, filter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) obj; - - boolean result = true; - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); - } - hash = (29 * hash) + 
getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - 
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance()) return this; - if 
(other.hasFilter()) { - mergeFilter(other.getFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFilter()) { - - return false; - } - if (!getFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .Filter filter = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filterBuilder_ == null) { - filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - onChanged(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; 
- } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:SkipFilter) - } - - static { - defaultInstance = new SkipFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SkipFilter) - } - - public interface TimestampsFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated int64 timestamps = 1; - java.util.List getTimestampsList(); - int getTimestampsCount(); - long getTimestamps(int index); - } - public static final class TimestampsFilter extends - com.google.protobuf.GeneratedMessage - implements TimestampsFilterOrBuilder { - // Use TimestampsFilter.newBuilder() to construct. - private TimestampsFilter(Builder builder) { - super(builder); - } - private TimestampsFilter(boolean noInit) {} - - private static final TimestampsFilter defaultInstance; - public static TimestampsFilter getDefaultInstance() { - return defaultInstance; - } - - public TimestampsFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable; - } - - // repeated int64 timestamps = 1; - public static final int TIMESTAMPS_FIELD_NUMBER = 1; - private java.util.List timestamps_; - public java.util.List - getTimestampsList() { - return timestamps_; - } - public int getTimestampsCount() { - return timestamps_.size(); - } - public long getTimestamps(int index) { - return timestamps_.get(index); - } - - private void initFields() { - timestamps_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < timestamps_.size(); i++) { - output.writeInt64(1, timestamps_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = 
memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < timestamps_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeInt64SizeNoTag(timestamps_.get(i)); - } - size += dataSize; - size += 1 * getTimestampsList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) obj; - - boolean result = true; - result = result && getTimestampsList() - .equals(other.getTimestampsList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getTimestampsCount() > 0) { - hash = (37 * hash) + TIMESTAMPS_FIELD_NUMBER; - hash = (53 * hash) + getTimestampsList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public 
static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - timestamps_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - timestamps_ = java.util.Collections.unmodifiableList(timestamps_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.timestamps_ = timestamps_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance()) return this; - if (!other.timestamps_.isEmpty()) { - if (timestamps_.isEmpty()) { - timestamps_ = other.timestamps_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureTimestampsIsMutable(); - timestamps_.addAll(other.timestamps_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - ensureTimestampsIsMutable(); - timestamps_.add(input.readInt64()); - break; - } - case 10: { - int length = input.readRawVarint32(); - int limit = input.pushLimit(length); - while (input.getBytesUntilLimit() > 0) { - addTimestamps(input.readInt64()); - } - input.popLimit(limit); - break; - } - } - } - } - - private int bitField0_; - - // repeated int64 timestamps = 1; - private java.util.List timestamps_ = java.util.Collections.emptyList();; - private void ensureTimestampsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - timestamps_ = new java.util.ArrayList(timestamps_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getTimestampsList() { - return java.util.Collections.unmodifiableList(timestamps_); - } - public int getTimestampsCount() { - return timestamps_.size(); - } - public long getTimestamps(int index) { - return timestamps_.get(index); - } - public Builder setTimestamps( - int index, long value) { - ensureTimestampsIsMutable(); - timestamps_.set(index, 
value); - onChanged(); - return this; - } - public Builder addTimestamps(long value) { - ensureTimestampsIsMutable(); - timestamps_.add(value); - onChanged(); - return this; - } - public Builder addAllTimestamps( - java.lang.Iterable values) { - ensureTimestampsIsMutable(); - super.addAll(values, timestamps_); - onChanged(); - return this; - } - public Builder clearTimestamps() { - timestamps_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:TimestampsFilter) - } - - static { - defaultInstance = new TimestampsFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:TimestampsFilter) - } - - public interface ValueFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .CompareFilter compareFilter = 1; - boolean hasCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - } - public static final class ValueFilter extends - com.google.protobuf.GeneratedMessage - implements ValueFilterOrBuilder { - // Use ValueFilter.newBuilder() to construct. - private ValueFilter(Builder builder) { - super(builder); - } - private ValueFilter(boolean noInit) {} - - private static final ValueFilter defaultInstance; - public static ValueFilter getDefaultInstance() { - return defaultInstance; - } - - public ValueFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .CompareFilter compareFilter = 1; - public static final int COMPAREFILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; - } - - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCompareFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCompareFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = 
memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) obj; - - boolean result = true; - result = result && (hasCompareFilter() == other.hasCompareFilter()); - if (hasCompareFilter()) { - result = result && getCompareFilter() - .equals(other.getCompareFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCompareFilter()) { - hash = (37 * hash) + COMPAREFILTER_FIELD_NUMBER; - hash = (53 * hash) + getCompareFilter().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getCompareFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (compareFilterBuilder_ == null) { - result.compareFilter_ = compareFilter_; - } else { - result.compareFilter_ = compareFilterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance()) return this; - if (other.hasCompareFilter()) { - mergeCompareFilter(other.getCompareFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCompareFilter()) { - - return false; - } - if (!getCompareFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(); - if (hasCompareFilter()) { - subBuilder.mergeFrom(getCompareFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setCompareFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .CompareFilter compareFilter = 1; - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; - public boolean hasCompareFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - if (compareFilterBuilder_ == null) { - return compareFilter_; - } else { - return compareFilterBuilder_.getMessage(); - } - } - public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - compareFilter_ = value; - onChanged(); - } else { - compareFilterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setCompareFilter( - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { - if (compareFilterBuilder_ == null) { - compareFilter_ = builderForValue.build(); - onChanged(); - } else { - compareFilterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { - if (compareFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { - compareFilter_ = - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); - } else { - compareFilter_ = value; - } - onChanged(); - } else { - compareFilterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearCompareFilter() { - if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - onChanged(); - } else { - compareFilterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getCompareFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - if (compareFilterBuilder_ != null) { - return compareFilterBuilder_.getMessageOrBuilder(); - } else { - return compareFilter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> - getCompareFilterFieldBuilder() { - if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, - getParentForChildren(), - isClean()); - compareFilter_ = null; - } - return compareFilterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ValueFilter) - } - - static { - defaultInstance = new ValueFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ValueFilter) - } 
- - public interface WhileMatchFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .Filter filter = 1; - boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder(); - } - public static final class WhileMatchFilter extends - com.google.protobuf.GeneratedMessage - implements WhileMatchFilterOrBuilder { - // Use WhileMatchFilter.newBuilder() to construct. - private WhileMatchFilter(Builder builder) { - super(builder); - } - private WhileMatchFilter(boolean noInit) {} - - private static final WhileMatchFilter defaultInstance; - public static WhileMatchFilter getDefaultInstance() { - return defaultInstance; - } - - public WhileMatchFilter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable; - } - - private int bitField0_; - // required .Filter filter = 1; - public static final int FILTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; - } - - private void initFields() { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFilter()) { - memoizedIsInitialized = 0; - return false; - } - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, filter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, filter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other = 
(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) obj; - - boolean result = true; - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, 
extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFilterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter build() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - 
to_bitField0_ |= 0x00000001; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance()) return this; - if (other.hasFilter()) { - mergeFilter(other.getFilter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFilter()) { - - return false; - } - if (!getFilter().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .Filter filter = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder builderForValue) { - if (filterBuilder_ == null) { - filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder 
mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance(); - onChanged(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder getFilterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WhileMatchFilter) - } - - static { - defaultInstance = new WhileMatchFilter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WhileMatchFilter) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnCountGetFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnCountGetFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnPaginationFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnPaginationFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnPrefixFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnPrefixFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnRangeFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnRangeFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CompareFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CompareFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DependentColumnFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_DependentColumnFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FamilyFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FamilyFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FilterList_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FilterList_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FilterWrapper_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FilterWrapper_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FirstKeyOnlyFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FirstKeyOnlyFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FuzzyRowFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FuzzyRowFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_InclusiveStopFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_InclusiveStopFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_KeyOnlyFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_KeyOnlyFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultipleColumnPrefixFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultipleColumnPrefixFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_PageFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_PageFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_PrefixFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_PrefixFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_QualifierFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_QualifierFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RandomRowFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RandomRowFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RowFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RowFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SingleColumnValueExcludeFilter_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SingleColumnValueFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SingleColumnValueFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SkipFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SkipFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TimestampsFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TimestampsFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ValueFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ValueFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WhileMatchFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WhileMatchFilter_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\014Filter.proto\032\013hbase.proto\032\020Comparator." + - "proto\"%\n\024ColumnCountGetFilter\022\r\n\005limit\030\001" + - " \002(\005\"7\n\026ColumnPaginationFilter\022\r\n\005limit\030" + - "\001 \002(\005\022\016\n\006offset\030\002 \001(\005\"$\n\022ColumnPrefixFil" + - "ter\022\016\n\006prefix\030\001 \002(\014\"q\n\021ColumnRangeFilter" + - "\022\021\n\tminColumn\030\001 \001(\014\022\032\n\022minColumnInclusiv" + - "e\030\002 \001(\010\022\021\n\tmaxColumn\030\003 \001(\014\022\032\n\022maxColumnI" + - "nclusive\030\004 \001(\010\"Q\n\rCompareFilter\022\037\n\tcompa" + - "reOp\030\001 \002(\0162\014.CompareType\022\037\n\ncomparator\030\002" + - " \001(\0132\013.Comparator\"\212\001\n\025DependentColumnFil", - "ter\022%\n\rcompareFilter\030\001 \002(\0132\016.CompareFilt" + - "er\022\024\n\014columnFamily\030\002 \001(\014\022\027\n\017columnQualif" + - "ier\030\003 \001(\014\022\033\n\023dropDependentColumn\030\004 \001(\010\"5" + - "\n\014FamilyFilter\022%\n\rcompareFilter\030\001 \002(\0132\016." 
+ - "CompareFilter\"\200\001\n\nFilterList\022&\n\010operator" + - "\030\001 \002(\0162\024.FilterList.Operator\022\030\n\007filters\030" + - "\002 \003(\0132\007.Filter\"0\n\010Operator\022\021\n\rMUST_PASS_" + - "ALL\020\001\022\021\n\rMUST_PASS_ONE\020\002\"(\n\rFilterWrappe" + - "r\022\027\n\006filter\030\001 \002(\0132\007.Filter\"\024\n\022FirstKeyOn" + - "lyFilter\";\n%FirstKeyValueMatchingQualifi", - "ersFilter\022\022\n\nqualifiers\030\001 \003(\014\"8\n\016FuzzyRo" + - "wFilter\022&\n\rfuzzyKeysData\030\001 \003(\0132\017.BytesBy" + - "tesPair\")\n\023InclusiveStopFilter\022\022\n\nstopRo" + - "wKey\030\001 \001(\014\"!\n\rKeyOnlyFilter\022\020\n\010lenAsVal\030" + - "\001 \002(\010\"4\n\032MultipleColumnPrefixFilter\022\026\n\016s" + - "ortedPrefixes\030\001 \003(\014\"\036\n\nPageFilter\022\020\n\010pag" + - "eSize\030\001 \002(\003\"\036\n\014PrefixFilter\022\016\n\006prefix\030\001 " + - "\001(\014\"8\n\017QualifierFilter\022%\n\rcompareFilter\030" + - "\001 \002(\0132\016.CompareFilter\"!\n\017RandomRowFilter" + - "\022\016\n\006chance\030\001 \002(\002\"2\n\tRowFilter\022%\n\rcompare", - "Filter\030\001 \002(\0132\016.CompareFilter\"[\n\036SingleCo" + - "lumnValueExcludeFilter\0229\n\027singleColumnVa" + - "lueFilter\030\001 \002(\0132\030.SingleColumnValueFilte" + - "r\"\352\001\n\027SingleColumnValueFilter\022\024\n\014columnF" + - "amily\030\001 \001(\014\022\027\n\017columnQualifier\030\002 \001(\014\022\037\n\t" + - "compareOp\030\003 \002(\0162\014.CompareType\022\037\n\ncompara" + - "tor\030\004 \002(\0132\013.Comparator\022\023\n\013foundColumn\030\005 " + - "\001(\010\022\025\n\rmatchedColumn\030\006 \001(\010\022\027\n\017filterIfMi" + - "ssing\030\007 \001(\010\022\031\n\021latestVersionOnly\030\010 \001(\010\"%" + - "\n\nSkipFilter\022\027\n\006filter\030\001 \002(\0132\007.Filter\"&\n", - "\020TimestampsFilter\022\022\n\ntimestamps\030\001 \003(\003\"4\n" + - "\013ValueFilter\022%\n\rcompareFilter\030\001 \002(\0132\016.Co" + - "mpareFilter\"+\n\020WhileMatchFilter\022\027\n\006filte" + - "r\030\001 \002(\0132\007.FilterBB\n*org.apache.hadoop.hb" + - "ase.protobuf.generatedB\014FilterProtosH\001\210\001" + - "\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_ColumnCountGetFilter_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_ColumnCountGetFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnCountGetFilter_descriptor, - new java.lang.String[] { "Limit", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); - internal_static_ColumnPaginationFilter_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_ColumnPaginationFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnPaginationFilter_descriptor, - new java.lang.String[] { "Limit", "Offset", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class); - 
internal_static_ColumnPrefixFilter_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_ColumnPrefixFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnPrefixFilter_descriptor, - new java.lang.String[] { "Prefix", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class); - internal_static_ColumnRangeFilter_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_ColumnRangeFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnRangeFilter_descriptor, - new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); - internal_static_CompareFilter_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_CompareFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CompareFilter_descriptor, - new java.lang.String[] { "CompareOp", "Comparator", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); - internal_static_DependentColumnFilter_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_DependentColumnFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DependentColumnFilter_descriptor, - new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); - internal_static_FamilyFilter_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_FamilyFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FamilyFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); - internal_static_FilterList_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_FilterList_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FilterList_descriptor, - new java.lang.String[] { "Operator", "Filters", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); - internal_static_FilterWrapper_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_FilterWrapper_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FilterWrapper_descriptor, - new java.lang.String[] { "Filter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); - internal_static_FirstKeyOnlyFilter_descriptor = - getDescriptor().getMessageTypes().get(9); - 
internal_static_FirstKeyOnlyFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FirstKeyOnlyFilter_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); - internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor, - new java.lang.String[] { "Qualifiers", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); - internal_static_FuzzyRowFilter_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_FuzzyRowFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FuzzyRowFilter_descriptor, - new java.lang.String[] { "FuzzyKeysData", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); - internal_static_InclusiveStopFilter_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_InclusiveStopFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_InclusiveStopFilter_descriptor, - new java.lang.String[] { "StopRowKey", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); - internal_static_KeyOnlyFilter_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_KeyOnlyFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_KeyOnlyFilter_descriptor, - new java.lang.String[] { "LenAsVal", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); - internal_static_MultipleColumnPrefixFilter_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_MultipleColumnPrefixFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultipleColumnPrefixFilter_descriptor, - new java.lang.String[] { "SortedPrefixes", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class); - internal_static_PageFilter_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_PageFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_PageFilter_descriptor, - new java.lang.String[] { "PageSize", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class); - internal_static_PrefixFilter_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_PrefixFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_PrefixFilter_descriptor, - new java.lang.String[] { "Prefix", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); - internal_static_QualifierFilter_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_QualifierFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_QualifierFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); - internal_static_RandomRowFilter_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_RandomRowFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RandomRowFilter_descriptor, - new java.lang.String[] { "Chance", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class); - internal_static_RowFilter_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_RowFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RowFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class); - internal_static_SingleColumnValueExcludeFilter_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SingleColumnValueExcludeFilter_descriptor, - new java.lang.String[] { "SingleColumnValueFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); - internal_static_SingleColumnValueFilter_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_SingleColumnValueFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SingleColumnValueFilter_descriptor, - new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FoundColumn", "MatchedColumn", "FilterIfMissing", "LatestVersionOnly", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); - internal_static_SkipFilter_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_SkipFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SkipFilter_descriptor, - new java.lang.String[] { "Filter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class); - internal_static_TimestampsFilter_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_TimestampsFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TimestampsFilter_descriptor, - new java.lang.String[] { 
"Timestamps", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class); - internal_static_ValueFilter_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_ValueFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ValueFilter_descriptor, - new java.lang.String[] { "CompareFilter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class); - internal_static_WhileMatchFilter_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_WhileMatchFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WhileMatchFilter_descriptor, - new java.lang.String[] { "Filter", }, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, - org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java deleted file mode 100644 index 6d37719..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ /dev/null @@ -1,11407 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: hbase.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class HBaseProtos { - private HBaseProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public enum CompareType - implements com.google.protobuf.ProtocolMessageEnum { - LESS(0, 0), - LESS_OR_EQUAL(1, 1), - EQUAL(2, 2), - NOT_EQUAL(3, 3), - GREATER_OR_EQUAL(4, 4), - GREATER(5, 5), - NO_OP(6, 6), - ; - - public static final int LESS_VALUE = 0; - public static final int LESS_OR_EQUAL_VALUE = 1; - public static final int EQUAL_VALUE = 2; - public static final int NOT_EQUAL_VALUE = 3; - public static final int GREATER_OR_EQUAL_VALUE = 4; - public static final int GREATER_VALUE = 5; - public static final int NO_OP_VALUE = 6; - - - public final int getNumber() { return value; } - - public static CompareType valueOf(int value) { - switch (value) { - case 0: return LESS; - case 1: return LESS_OR_EQUAL; - case 2: return EQUAL; - case 3: return NOT_EQUAL; - case 4: return GREATER_OR_EQUAL; - case 5: return GREATER; - case 6: return NO_OP; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public CompareType findValueByNumber(int number) { - return CompareType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0); - } - - private static final CompareType[] VALUES = { - LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, - }; - - public static CompareType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private CompareType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:CompareType) - } - - public enum KeyType - implements com.google.protobuf.ProtocolMessageEnum { - MINIMUM(0, 0), - PUT(1, 4), - DELETE(2, 8), - DELETE_COLUMN(3, 12), - DELETE_FAMILY(4, 14), - MAXIMUM(5, 255), - ; - - public static final int MINIMUM_VALUE = 0; - public static final int PUT_VALUE = 4; - public static final int DELETE_VALUE = 8; - public static final int DELETE_COLUMN_VALUE = 12; - public static final int DELETE_FAMILY_VALUE = 14; - public static final int MAXIMUM_VALUE = 255; - - - public final int getNumber() { return value; } - - public static KeyType valueOf(int value) { - switch (value) { - case 0: return MINIMUM; - case 4: return PUT; - case 8: return DELETE; - case 12: return DELETE_COLUMN; - case 14: return DELETE_FAMILY; - case 255: return MAXIMUM; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = 
- new com.google.protobuf.Internal.EnumLiteMap() { - public KeyType findValueByNumber(int number) { - return KeyType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1); - } - - private static final KeyType[] VALUES = { - MINIMUM, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, MAXIMUM, - }; - - public static KeyType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private KeyType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:KeyType) - } - - public interface TableSchemaOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes name = 1; - boolean hasName(); - com.google.protobuf.ByteString getName(); - - // repeated .TableSchema.Attribute attributes = 2; - java.util.List - getAttributesList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getAttributes(int index); - int getAttributesCount(); - java.util.List - getAttributesOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder getAttributesOrBuilder( - int index); - - // repeated .ColumnFamilySchema columnFamilies = 3; - java.util.List - getColumnFamiliesList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index); - int getColumnFamiliesCount(); - java.util.List - getColumnFamiliesOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( - int index); - } - public static final class TableSchema extends - com.google.protobuf.GeneratedMessage - implements TableSchemaOrBuilder { - // Use TableSchema.newBuilder() to construct. 
- private TableSchema(Builder builder) { - super(builder); - } - private TableSchema(boolean noInit) {} - - private static final TableSchema defaultInstance; - public static TableSchema getDefaultInstance() { - return defaultInstance; - } - - public TableSchema getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable; - } - - public interface AttributeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes name = 1; - boolean hasName(); - com.google.protobuf.ByteString getName(); - - // required bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class Attribute extends - com.google.protobuf.GeneratedMessage - implements AttributeOrBuilder { - // Use Attribute.newBuilder() to construct. - private Attribute(Builder builder) { - super(builder); - } - private Attribute(boolean noInit) {} - - private static final Attribute defaultInstance; - public static Attribute getDefaultInstance() { - return defaultInstance; - } - - public Attribute getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_fieldAccessorTable; - } - - private int bitField0_; - // required bytes name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - - // required bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - name_ = com.google.protobuf.ByteString.EMPTY; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, name_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; 
- if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, name_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_Attribute_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes name = 1; - private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - public Builder 
setName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - - // required bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:TableSchema.Attribute) - } - - static { - defaultInstance = new Attribute(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:TableSchema.Attribute) - } - - private int bitField0_; - // optional bytes name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - - // repeated .TableSchema.Attribute attributes = 2; - public static final int ATTRIBUTES_FIELD_NUMBER = 2; - private java.util.List attributes_; - public java.util.List getAttributesList() { - return attributes_; - } - public java.util.List - getAttributesOrBuilderList() { - return attributes_; - } - public int getAttributesCount() { - return attributes_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getAttributes(int index) { - return attributes_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder getAttributesOrBuilder( - int index) { - return attributes_.get(index); - } - - // repeated .ColumnFamilySchema columnFamilies = 3; - public static final int COLUMNFAMILIES_FIELD_NUMBER = 3; - private java.util.List columnFamilies_; - public java.util.List getColumnFamiliesList() { - return columnFamilies_; - } - public java.util.List - getColumnFamiliesOrBuilderList() { - return columnFamilies_; - } - public int getColumnFamiliesCount() { - return columnFamilies_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { - return columnFamilies_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( - int index) { - return columnFamilies_.get(index); - } - - private void initFields() { - name_ = com.google.protobuf.ByteString.EMPTY; - attributes_ = java.util.Collections.emptyList(); - columnFamilies_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getAttributesCount(); i++) { - if (!getAttributes(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getColumnFamiliesCount(); i++) { - if 
(!getColumnFamilies(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, name_); - } - for (int i = 0; i < attributes_.size(); i++) { - output.writeMessage(2, attributes_.get(i)); - } - for (int i = 0; i < columnFamilies_.size(); i++) { - output.writeMessage(3, columnFamilies_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, name_); - } - for (int i = 0; i < attributes_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, attributes_.get(i)); - } - for (int i = 0; i < columnFamilies_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, columnFamilies_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && getAttributesList() - .equals(other.getAttributesList()); - result = result && getColumnFamiliesList() - .equals(other.getColumnFamiliesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (getAttributesCount() > 0) { - hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; - hash = (53 * hash) + getAttributesList().hashCode(); - } - if (getColumnFamiliesCount() > 0) { - hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamiliesList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder() - private Builder() { - 
maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getAttributesFieldBuilder(); - getColumnFamiliesFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (attributesBuilder_ == null) { - attributes_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - attributesBuilder_.clear(); - } - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - columnFamiliesBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (attributesBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - attributes_ = java.util.Collections.unmodifiableList(attributes_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.attributes_ = attributes_; - } else { - result.attributes_ = attributesBuilder_.build(); - } - if (columnFamiliesBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.columnFamilies_ = columnFamilies_; - } else { - result.columnFamilies_ = columnFamiliesBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (attributesBuilder_ == null) { - if (!other.attributes_.isEmpty()) { - if (attributes_.isEmpty()) { - attributes_ = other.attributes_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureAttributesIsMutable(); - attributes_.addAll(other.attributes_); - } - onChanged(); - } - } else { - if (!other.attributes_.isEmpty()) { - if (attributesBuilder_.isEmpty()) { - attributesBuilder_.dispose(); - attributesBuilder_ = null; - attributes_ = other.attributes_; - bitField0_ = (bitField0_ & ~0x00000002); - attributesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getAttributesFieldBuilder() : null; - } else { - attributesBuilder_.addAllMessages(other.attributes_); - } - } - } - if (columnFamiliesBuilder_ == null) { - if (!other.columnFamilies_.isEmpty()) { - if (columnFamilies_.isEmpty()) { - columnFamilies_ = other.columnFamilies_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureColumnFamiliesIsMutable(); - columnFamilies_.addAll(other.columnFamilies_); - } - onChanged(); - } - } else { - if (!other.columnFamilies_.isEmpty()) { - if (columnFamiliesBuilder_.isEmpty()) { - columnFamiliesBuilder_.dispose(); - columnFamiliesBuilder_ = null; - columnFamilies_ = other.columnFamilies_; - bitField0_ = (bitField0_ & ~0x00000004); - columnFamiliesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnFamiliesFieldBuilder() : null; - } else { - columnFamiliesBuilder_.addAllMessages(other.columnFamilies_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getAttributesCount(); i++) { - if (!getAttributes(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getColumnFamiliesCount(); i++) { - if (!getColumnFamilies(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttributes(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumnFamilies(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // 
optional bytes name = 1; - private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - public Builder setName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - - // repeated .TableSchema.Attribute attributes = 2; - private java.util.List attributes_ = - java.util.Collections.emptyList(); - private void ensureAttributesIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - attributes_ = new java.util.ArrayList(attributes_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder> attributesBuilder_; - - public java.util.List getAttributesList() { - if (attributesBuilder_ == null) { - return java.util.Collections.unmodifiableList(attributes_); - } else { - return attributesBuilder_.getMessageList(); - } - } - public int getAttributesCount() { - if (attributesBuilder_ == null) { - return attributes_.size(); - } else { - return attributesBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute getAttributes(int index) { - if (attributesBuilder_ == null) { - return attributes_.get(index); - } else { - return attributesBuilder_.getMessage(index); - } - } - public Builder setAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute value) { - if (attributesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributesIsMutable(); - attributes_.set(index, value); - onChanged(); - } else { - attributesBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder builderForValue) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.set(index, builderForValue.build()); - onChanged(); - } else { - attributesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute value) { - if (attributesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributesIsMutable(); - attributes_.add(value); - onChanged(); - } else { - attributesBuilder_.addMessage(value); - } - return this; - } - public Builder addAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute value) { - if (attributesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributesIsMutable(); - attributes_.add(index, value); - onChanged(); - } else { - attributesBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttributes( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder builderForValue) { - 
if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.add(builderForValue.build()); - onChanged(); - } else { - attributesBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder builderForValue) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.add(index, builderForValue.build()); - onChanged(); - } else { - attributesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttributes( - java.lang.Iterable values) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - super.addAll(values, attributes_); - onChanged(); - } else { - attributesBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttributes() { - if (attributesBuilder_ == null) { - attributes_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - attributesBuilder_.clear(); - } - return this; - } - public Builder removeAttributes(int index) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.remove(index); - onChanged(); - } else { - attributesBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder getAttributesBuilder( - int index) { - return getAttributesFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder getAttributesOrBuilder( - int index) { - if (attributesBuilder_ == null) { - return attributes_.get(index); } else { - return attributesBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributesOrBuilderList() { - if (attributesBuilder_ != null) { - return attributesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attributes_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder addAttributesBuilder() { - return getAttributesFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder addAttributesBuilder( - int index) { - return getAttributesFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.getDefaultInstance()); - } - public java.util.List - getAttributesBuilderList() { - return getAttributesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder> - getAttributesFieldBuilder() { - if (attributesBuilder_ == null) { - attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.AttributeOrBuilder>( - attributes_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - attributes_ = null; - } - return 
attributesBuilder_; - } - - // repeated .ColumnFamilySchema columnFamilies = 3; - private java.util.List columnFamilies_ = - java.util.Collections.emptyList(); - private void ensureColumnFamiliesIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - columnFamilies_ = new java.util.ArrayList(columnFamilies_); - bitField0_ |= 0x00000004; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; - - public java.util.List getColumnFamiliesList() { - if (columnFamiliesBuilder_ == null) { - return java.util.Collections.unmodifiableList(columnFamilies_); - } else { - return columnFamiliesBuilder_.getMessageList(); - } - } - public int getColumnFamiliesCount() { - if (columnFamiliesBuilder_ == null) { - return columnFamilies_.size(); - } else { - return columnFamiliesBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { - if (columnFamiliesBuilder_ == null) { - return columnFamilies_.get(index); - } else { - return columnFamiliesBuilder_.getMessage(index); - } - } - public Builder setColumnFamilies( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { - if (columnFamiliesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnFamiliesIsMutable(); - columnFamilies_.set(index, value); - onChanged(); - } else { - columnFamiliesBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumnFamilies( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { - if (columnFamiliesBuilder_ == null) { - ensureColumnFamiliesIsMutable(); - columnFamilies_.set(index, builderForValue.build()); - onChanged(); - } else { - columnFamiliesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { - if (columnFamiliesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnFamiliesIsMutable(); - columnFamilies_.add(value); - onChanged(); - } else { - columnFamiliesBuilder_.addMessage(value); - } - return this; - } - public Builder addColumnFamilies( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { - if (columnFamiliesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnFamiliesIsMutable(); - columnFamilies_.add(index, value); - onChanged(); - } else { - columnFamiliesBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumnFamilies( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { - if (columnFamiliesBuilder_ == null) { - ensureColumnFamiliesIsMutable(); - columnFamilies_.add(builderForValue.build()); - onChanged(); - } else { - columnFamiliesBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumnFamilies( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { - if (columnFamiliesBuilder_ == null) { - ensureColumnFamiliesIsMutable(); - 
columnFamilies_.add(index, builderForValue.build()); - onChanged(); - } else { - columnFamiliesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumnFamilies( - java.lang.Iterable values) { - if (columnFamiliesBuilder_ == null) { - ensureColumnFamiliesIsMutable(); - super.addAll(values, columnFamilies_); - onChanged(); - } else { - columnFamiliesBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumnFamilies() { - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - columnFamiliesBuilder_.clear(); - } - return this; - } - public Builder removeColumnFamilies(int index) { - if (columnFamiliesBuilder_ == null) { - ensureColumnFamiliesIsMutable(); - columnFamilies_.remove(index); - onChanged(); - } else { - columnFamiliesBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder( - int index) { - return getColumnFamiliesFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( - int index) { - if (columnFamiliesBuilder_ == null) { - return columnFamilies_.get(index); } else { - return columnFamiliesBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnFamiliesOrBuilderList() { - if (columnFamiliesBuilder_ != null) { - return columnFamiliesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(columnFamilies_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() { - return getColumnFamiliesFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder( - int index) { - return getColumnFamiliesFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); - } - public java.util.List - getColumnFamiliesBuilderList() { - return getColumnFamiliesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> - getColumnFamiliesFieldBuilder() { - if (columnFamiliesBuilder_ == null) { - columnFamiliesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnFamilies_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - columnFamilies_ = null; - } - return columnFamiliesBuilder_; - } - - // @@protoc_insertion_point(builder_scope:TableSchema) - } - - static { - defaultInstance = new TableSchema(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:TableSchema) - } - - public interface ColumnFamilySchemaOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { - - // required bytes name = 1; - boolean hasName(); - com.google.protobuf.ByteString getName(); - - // repeated .ColumnFamilySchema.Attribute attributes = 2; - java.util.List - getAttributesList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getAttributes(int index); - int getAttributesCount(); - java.util.List - getAttributesOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder getAttributesOrBuilder( - int index); - } - public static final class ColumnFamilySchema extends - com.google.protobuf.GeneratedMessage - implements ColumnFamilySchemaOrBuilder { - // Use ColumnFamilySchema.newBuilder() to construct. - private ColumnFamilySchema(Builder builder) { - super(builder); - } - private ColumnFamilySchema(boolean noInit) {} - - private static final ColumnFamilySchema defaultInstance; - public static ColumnFamilySchema getDefaultInstance() { - return defaultInstance; - } - - public ColumnFamilySchema getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable; - } - - public interface AttributeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes name = 1; - boolean hasName(); - com.google.protobuf.ByteString getName(); - - // required bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class Attribute extends - com.google.protobuf.GeneratedMessage - implements AttributeOrBuilder { - // Use Attribute.newBuilder() to construct. 
- private Attribute(Builder builder) { - super(builder); - } - private Attribute(boolean noInit) {} - - private static final Attribute defaultInstance; - public static Attribute getDefaultInstance() { - return defaultInstance; - } - - public Attribute getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable; - } - - private int bitField0_; - // required bytes name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - - // required bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - name_ = com.google.protobuf.ByteString.EMPTY; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, name_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, name_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - 
result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute buildPartial() { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes name = 1; - private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - public Builder setName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - - // required bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = 
getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnFamilySchema.Attribute) - } - - static { - defaultInstance = new Attribute(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnFamilySchema.Attribute) - } - - private int bitField0_; - // required bytes name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - - // repeated .ColumnFamilySchema.Attribute attributes = 2; - public static final int ATTRIBUTES_FIELD_NUMBER = 2; - private java.util.List attributes_; - public java.util.List getAttributesList() { - return attributes_; - } - public java.util.List - getAttributesOrBuilderList() { - return attributes_; - } - public int getAttributesCount() { - return attributes_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getAttributes(int index) { - return attributes_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder getAttributesOrBuilder( - int index) { - return attributes_.get(index); - } - - private void initFields() { - name_ = com.google.protobuf.ByteString.EMPTY; - attributes_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getAttributesCount(); i++) { - if (!getAttributes(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, name_); - } - for (int i = 0; i < attributes_.size(); i++) { - output.writeMessage(2, attributes_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, name_); - } - for (int i = 0; i < attributes_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, attributes_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && 
getName() - .equals(other.getName()); - } - result = result && getAttributesList() - .equals(other.getAttributesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (getAttributesCount() > 0) { - hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; - hash = (53 * hash) + getAttributesList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getAttributesFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (attributesBuilder_ == null) { - attributes_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - attributesBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema(this); - int 
from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (attributesBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - attributes_ = java.util.Collections.unmodifiableList(attributes_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.attributes_ = attributes_; - } else { - result.attributes_ = attributesBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (attributesBuilder_ == null) { - if (!other.attributes_.isEmpty()) { - if (attributes_.isEmpty()) { - attributes_ = other.attributes_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureAttributesIsMutable(); - attributes_.addAll(other.attributes_); - } - onChanged(); - } - } else { - if (!other.attributes_.isEmpty()) { - if (attributesBuilder_.isEmpty()) { - attributesBuilder_.dispose(); - attributesBuilder_ = null; - attributes_ = other.attributes_; - bitField0_ = (bitField0_ & ~0x00000002); - attributesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getAttributesFieldBuilder() : null; - } else { - attributesBuilder_.addAllMessages(other.attributes_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - for (int i = 0; i < getAttributesCount(); i++) { - if (!getAttributes(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttributes(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes name = 1; - private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getName() { - return name_; - } - 
public Builder setName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - - // repeated .ColumnFamilySchema.Attribute attributes = 2; - private java.util.List attributes_ = - java.util.Collections.emptyList(); - private void ensureAttributesIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - attributes_ = new java.util.ArrayList(attributes_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder> attributesBuilder_; - - public java.util.List getAttributesList() { - if (attributesBuilder_ == null) { - return java.util.Collections.unmodifiableList(attributes_); - } else { - return attributesBuilder_.getMessageList(); - } - } - public int getAttributesCount() { - if (attributesBuilder_ == null) { - return attributes_.size(); - } else { - return attributesBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute getAttributes(int index) { - if (attributesBuilder_ == null) { - return attributes_.get(index); - } else { - return attributesBuilder_.getMessage(index); - } - } - public Builder setAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute value) { - if (attributesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributesIsMutable(); - attributes_.set(index, value); - onChanged(); - } else { - attributesBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder builderForValue) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.set(index, builderForValue.build()); - onChanged(); - } else { - attributesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute value) { - if (attributesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributesIsMutable(); - attributes_.add(value); - onChanged(); - } else { - attributesBuilder_.addMessage(value); - } - return this; - } - public Builder addAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute value) { - if (attributesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributesIsMutable(); - attributes_.add(index, value); - onChanged(); - } else { - attributesBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttributes( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder builderForValue) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.add(builderForValue.build()); - onChanged(); - } else { - attributesBuilder_.addMessage(builderForValue.build()); - } - 
return this; - } - public Builder addAttributes( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder builderForValue) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.add(index, builderForValue.build()); - onChanged(); - } else { - attributesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttributes( - java.lang.Iterable values) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - super.addAll(values, attributes_); - onChanged(); - } else { - attributesBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttributes() { - if (attributesBuilder_ == null) { - attributes_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - attributesBuilder_.clear(); - } - return this; - } - public Builder removeAttributes(int index) { - if (attributesBuilder_ == null) { - ensureAttributesIsMutable(); - attributes_.remove(index); - onChanged(); - } else { - attributesBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder getAttributesBuilder( - int index) { - return getAttributesFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder getAttributesOrBuilder( - int index) { - if (attributesBuilder_ == null) { - return attributes_.get(index); } else { - return attributesBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributesOrBuilderList() { - if (attributesBuilder_ != null) { - return attributesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attributes_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder addAttributesBuilder() { - return getAttributesFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder addAttributesBuilder( - int index) { - return getAttributesFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.getDefaultInstance()); - } - public java.util.List - getAttributesBuilderList() { - return getAttributesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder> - getAttributesFieldBuilder() { - if (attributesBuilder_ == null) { - attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.AttributeOrBuilder>( - attributes_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - attributes_ = null; - } - return attributesBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ColumnFamilySchema) - } - - static { - 
defaultInstance = new ColumnFamilySchema(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnFamilySchema) - } - - public interface RegionInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 regionId = 1; - boolean hasRegionId(); - long getRegionId(); - - // required bytes tableName = 2; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // optional bytes startKey = 3; - boolean hasStartKey(); - com.google.protobuf.ByteString getStartKey(); - - // optional bytes endKey = 4; - boolean hasEndKey(); - com.google.protobuf.ByteString getEndKey(); - - // optional bool offline = 5; - boolean hasOffline(); - boolean getOffline(); - - // optional bool split = 6; - boolean hasSplit(); - boolean getSplit(); - } - public static final class RegionInfo extends - com.google.protobuf.GeneratedMessage - implements RegionInfoOrBuilder { - // Use RegionInfo.newBuilder() to construct. - private RegionInfo(Builder builder) { - super(builder); - } - private RegionInfo(boolean noInit) {} - - private static final RegionInfo defaultInstance; - public static RegionInfo getDefaultInstance() { - return defaultInstance; - } - - public RegionInfo getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 regionId = 1; - public static final int REGIONID_FIELD_NUMBER = 1; - private long regionId_; - public boolean hasRegionId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getRegionId() { - return regionId_; - } - - // required bytes tableName = 2; - public static final int TABLENAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // optional bytes startKey = 3; - public static final int STARTKEY_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString startKey_; - public boolean hasStartKey() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getStartKey() { - return startKey_; - } - - // optional bytes endKey = 4; - public static final int ENDKEY_FIELD_NUMBER = 4; - private com.google.protobuf.ByteString endKey_; - public boolean hasEndKey() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getEndKey() { - return endKey_; - } - - // optional bool offline = 5; - public static final int OFFLINE_FIELD_NUMBER = 5; - private boolean offline_; - public boolean hasOffline() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getOffline() { - return offline_; - } - - // optional bool split = 6; - public static final int SPLIT_FIELD_NUMBER = 6; - private boolean split_; - public boolean hasSplit() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getSplit() { - return split_; - } - - private void initFields() { - regionId_ = 0L; - tableName_ = com.google.protobuf.ByteString.EMPTY; - startKey_ = 
com.google.protobuf.ByteString.EMPTY; - endKey_ = com.google.protobuf.ByteString.EMPTY; - offline_ = false; - split_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionId()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, regionId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, startKey_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, endKey_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBool(5, offline_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(6, split_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, regionId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, startKey_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, endKey_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(5, offline_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(6, split_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) obj; - - boolean result = true; - result = result && (hasRegionId() == other.hasRegionId()); - if (hasRegionId()) { - result = result && (getRegionId() - == other.getRegionId()); - } - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasStartKey() == other.hasStartKey()); - if (hasStartKey()) { - result = result && getStartKey() - .equals(other.getStartKey()); - } - result = result && (hasEndKey() == other.hasEndKey()); - if (hasEndKey()) { - result = result && getEndKey() - .equals(other.getEndKey()); - } - result = result && 
(hasOffline() == other.hasOffline()); - if (hasOffline()) { - result = result && (getOffline() - == other.getOffline()); - } - result = result && (hasSplit() == other.hasSplit()); - if (hasSplit()) { - result = result && (getSplit() - == other.getSplit()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionId()) { - hash = (37 * hash) + REGIONID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getRegionId()); - } - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasStartKey()) { - hash = (37 * hash) + STARTKEY_FIELD_NUMBER; - hash = (53 * hash) + getStartKey().hashCode(); - } - if (hasEndKey()) { - hash = (37 * hash) + ENDKEY_FIELD_NUMBER; - hash = (53 * hash) + getEndKey().hashCode(); - } - if (hasOffline()) { - hash = (37 * hash) + OFFLINE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOffline()); - } - if (hasSplit()) { - hash = (37 * hash) + SPLIT_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSplit()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return 
builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - regionId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - startKey_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - endKey_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - offline_ = false; - bitField0_ = (bitField0_ & ~0x00000010); - split_ = false; - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo 
buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.regionId_ = regionId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.startKey_ = startKey_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.endKey_ = endKey_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.offline_ = offline_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.split_ = split_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; - if (other.hasRegionId()) { - setRegionId(other.getRegionId()); - } - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasStartKey()) { - setStartKey(other.getStartKey()); - } - if (other.hasEndKey()) { - setEndKey(other.getEndKey()); - } - if (other.hasOffline()) { - setOffline(other.getOffline()); - } - if (other.hasSplit()) { - setSplit(other.getSplit()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionId()) { - - return false; - } - if (!hasTableName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - regionId_ = input.readUInt64(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - startKey_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - endKey_ = 
input.readBytes(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - offline_ = input.readBool(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - split_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 regionId = 1; - private long regionId_ ; - public boolean hasRegionId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getRegionId() { - return regionId_; - } - public Builder setRegionId(long value) { - bitField0_ |= 0x00000001; - regionId_ = value; - onChanged(); - return this; - } - public Builder clearRegionId() { - bitField0_ = (bitField0_ & ~0x00000001); - regionId_ = 0L; - onChanged(); - return this; - } - - // required bytes tableName = 2; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000002); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // optional bytes startKey = 3; - private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasStartKey() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getStartKey() { - return startKey_; - } - public Builder setStartKey(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - startKey_ = value; - onChanged(); - return this; - } - public Builder clearStartKey() { - bitField0_ = (bitField0_ & ~0x00000004); - startKey_ = getDefaultInstance().getStartKey(); - onChanged(); - return this; - } - - // optional bytes endKey = 4; - private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasEndKey() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getEndKey() { - return endKey_; - } - public Builder setEndKey(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - endKey_ = value; - onChanged(); - return this; - } - public Builder clearEndKey() { - bitField0_ = (bitField0_ & ~0x00000008); - endKey_ = getDefaultInstance().getEndKey(); - onChanged(); - return this; - } - - // optional bool offline = 5; - private boolean offline_ ; - public boolean hasOffline() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getOffline() { - return offline_; - } - public Builder setOffline(boolean value) { - bitField0_ |= 0x00000010; - offline_ = value; - onChanged(); - return this; - } - public Builder clearOffline() { - bitField0_ = (bitField0_ & ~0x00000010); - offline_ = false; - onChanged(); - return this; - } - - // optional bool split = 6; - private boolean split_ ; - public boolean hasSplit() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getSplit() { - return split_; - } - public Builder setSplit(boolean value) { - bitField0_ |= 0x00000020; - split_ = value; - onChanged(); - return this; - } - public Builder clearSplit() { - bitField0_ = (bitField0_ 
& ~0x00000020); - split_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RegionInfo) - } - - static { - defaultInstance = new RegionInfo(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionInfo) - } - - public interface RegionSpecifierOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier.RegionSpecifierType type = 1; - boolean hasType(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); - - // required bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class RegionSpecifier extends - com.google.protobuf.GeneratedMessage - implements RegionSpecifierOrBuilder { - // Use RegionSpecifier.newBuilder() to construct. - private RegionSpecifier(Builder builder) { - super(builder); - } - private RegionSpecifier(boolean noInit) {} - - private static final RegionSpecifier defaultInstance; - public static RegionSpecifier getDefaultInstance() { - return defaultInstance; - } - - public RegionSpecifier getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; - } - - public enum RegionSpecifierType - implements com.google.protobuf.ProtocolMessageEnum { - REGION_NAME(0, 1), - ENCODED_REGION_NAME(1, 2), - ; - - public static final int REGION_NAME_VALUE = 1; - public static final int ENCODED_REGION_NAME_VALUE = 2; - - - public final int getNumber() { return value; } - - public static RegionSpecifierType valueOf(int value) { - switch (value) { - case 1: return REGION_NAME; - case 2: return ENCODED_REGION_NAME; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public RegionSpecifierType findValueByNumber(int number) { - return RegionSpecifierType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); - } - - private static final RegionSpecifierType[] VALUES = { - REGION_NAME, ENCODED_REGION_NAME, - }; - - public static RegionSpecifierType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private RegionSpecifierType(int index, int value) { - this.index = index; - this.value = value; - } - - // 
@@protoc_insertion_point(enum_scope:RegionSpecifier.RegionSpecifierType) - } - - private int bitField0_; - // required .RegionSpecifier.RegionSpecifierType type = 1; - public static final int TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; - public boolean hasType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { - return type_; - } - - // required bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, type_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, type_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) obj; - - boolean result = true; - result = result && (hasType() == other.hasType()); - if (hasType()) { - result = result && - (getType() == other.getType()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasType()) { - hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); - } - if (hasValue()) { - 
hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { 
- Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.type_ = type_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)other); - } else { - super.mergeFrom(other); 
- return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; - if (other.hasType()) { - setType(other.getType()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasType()) { - - return false; - } - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - type_ = value; - } - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier.RegionSpecifierType type = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; - public boolean hasType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { - return type_; - } - public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - type_ = value; - onChanged(); - return this; - } - public Builder clearType() { - bitField0_ = (bitField0_ & ~0x00000001); - type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; - onChanged(); - return this; - } - - // required bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RegionSpecifier) - } - - static { - defaultInstance = new RegionSpecifier(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:RegionSpecifier) - } - - public interface RegionLoadOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier regionSpecifier = 1; - boolean hasRegionSpecifier(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder(); - - // optional uint32 stores = 2; - boolean hasStores(); - int getStores(); - - // optional uint32 storefiles = 3; - boolean hasStorefiles(); - int getStorefiles(); - - // optional uint32 storeUncompressedSizeMB = 4; - boolean hasStoreUncompressedSizeMB(); - int getStoreUncompressedSizeMB(); - - // optional uint32 storefileSizeMB = 5; - boolean hasStorefileSizeMB(); - int getStorefileSizeMB(); - - // optional uint32 memstoreSizeMB = 6; - boolean hasMemstoreSizeMB(); - int getMemstoreSizeMB(); - - // optional uint32 storefileIndexSizeMB = 7; - boolean hasStorefileIndexSizeMB(); - int getStorefileIndexSizeMB(); - - // optional uint64 readRequestsCount = 8; - boolean hasReadRequestsCount(); - long getReadRequestsCount(); - - // optional uint64 writeRequestsCount = 9; - boolean hasWriteRequestsCount(); - long getWriteRequestsCount(); - - // optional uint64 totalCompactingKVs = 10; - boolean hasTotalCompactingKVs(); - long getTotalCompactingKVs(); - - // optional uint64 currentCompactedKVs = 11; - boolean hasCurrentCompactedKVs(); - long getCurrentCompactedKVs(); - - // optional uint32 rootIndexSizeKB = 12; - boolean hasRootIndexSizeKB(); - int getRootIndexSizeKB(); - - // optional uint32 totalStaticIndexSizeKB = 13; - boolean hasTotalStaticIndexSizeKB(); - int getTotalStaticIndexSizeKB(); - - // optional uint32 totalStaticBloomSizeKB = 14; - boolean hasTotalStaticBloomSizeKB(); - int getTotalStaticBloomSizeKB(); - - // repeated .Coprocessor coprocessors = 15; - java.util.List - getCoprocessorsList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index); - int getCoprocessorsCount(); - java.util.List - getCoprocessorsOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( - int index); - - // optional uint64 completeSequenceId = 16; - boolean hasCompleteSequenceId(); - long getCompleteSequenceId(); - } - public static final class RegionLoad extends - com.google.protobuf.GeneratedMessage - implements RegionLoadOrBuilder { - // Use RegionLoad.newBuilder() to construct. 
- private RegionLoad(Builder builder) { - super(builder); - } - private RegionLoad(boolean noInit) {} - - private static final RegionLoad defaultInstance; - public static RegionLoad getDefaultInstance() { - return defaultInstance; - } - - public RegionLoad getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier regionSpecifier = 1; - public static final int REGIONSPECIFIER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_; - public boolean hasRegionSpecifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { - return regionSpecifier_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { - return regionSpecifier_; - } - - // optional uint32 stores = 2; - public static final int STORES_FIELD_NUMBER = 2; - private int stores_; - public boolean hasStores() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getStores() { - return stores_; - } - - // optional uint32 storefiles = 3; - public static final int STOREFILES_FIELD_NUMBER = 3; - private int storefiles_; - public boolean hasStorefiles() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getStorefiles() { - return storefiles_; - } - - // optional uint32 storeUncompressedSizeMB = 4; - public static final int STOREUNCOMPRESSEDSIZEMB_FIELD_NUMBER = 4; - private int storeUncompressedSizeMB_; - public boolean hasStoreUncompressedSizeMB() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getStoreUncompressedSizeMB() { - return storeUncompressedSizeMB_; - } - - // optional uint32 storefileSizeMB = 5; - public static final int STOREFILESIZEMB_FIELD_NUMBER = 5; - private int storefileSizeMB_; - public boolean hasStorefileSizeMB() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public int getStorefileSizeMB() { - return storefileSizeMB_; - } - - // optional uint32 memstoreSizeMB = 6; - public static final int MEMSTORESIZEMB_FIELD_NUMBER = 6; - private int memstoreSizeMB_; - public boolean hasMemstoreSizeMB() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public int getMemstoreSizeMB() { - return memstoreSizeMB_; - } - - // optional uint32 storefileIndexSizeMB = 7; - public static final int STOREFILEINDEXSIZEMB_FIELD_NUMBER = 7; - private int storefileIndexSizeMB_; - public boolean hasStorefileIndexSizeMB() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getStorefileIndexSizeMB() { - return storefileIndexSizeMB_; - } - - // optional uint64 readRequestsCount = 8; - public static final int READREQUESTSCOUNT_FIELD_NUMBER = 8; - private long readRequestsCount_; - public boolean hasReadRequestsCount() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public long getReadRequestsCount() { - return readRequestsCount_; - } - - // optional uint64 writeRequestsCount = 9; - public static final int WRITEREQUESTSCOUNT_FIELD_NUMBER = 
9; - private long writeRequestsCount_; - public boolean hasWriteRequestsCount() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public long getWriteRequestsCount() { - return writeRequestsCount_; - } - - // optional uint64 totalCompactingKVs = 10; - public static final int TOTALCOMPACTINGKVS_FIELD_NUMBER = 10; - private long totalCompactingKVs_; - public boolean hasTotalCompactingKVs() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - public long getTotalCompactingKVs() { - return totalCompactingKVs_; - } - - // optional uint64 currentCompactedKVs = 11; - public static final int CURRENTCOMPACTEDKVS_FIELD_NUMBER = 11; - private long currentCompactedKVs_; - public boolean hasCurrentCompactedKVs() { - return ((bitField0_ & 0x00000400) == 0x00000400); - } - public long getCurrentCompactedKVs() { - return currentCompactedKVs_; - } - - // optional uint32 rootIndexSizeKB = 12; - public static final int ROOTINDEXSIZEKB_FIELD_NUMBER = 12; - private int rootIndexSizeKB_; - public boolean hasRootIndexSizeKB() { - return ((bitField0_ & 0x00000800) == 0x00000800); - } - public int getRootIndexSizeKB() { - return rootIndexSizeKB_; - } - - // optional uint32 totalStaticIndexSizeKB = 13; - public static final int TOTALSTATICINDEXSIZEKB_FIELD_NUMBER = 13; - private int totalStaticIndexSizeKB_; - public boolean hasTotalStaticIndexSizeKB() { - return ((bitField0_ & 0x00001000) == 0x00001000); - } - public int getTotalStaticIndexSizeKB() { - return totalStaticIndexSizeKB_; - } - - // optional uint32 totalStaticBloomSizeKB = 14; - public static final int TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER = 14; - private int totalStaticBloomSizeKB_; - public boolean hasTotalStaticBloomSizeKB() { - return ((bitField0_ & 0x00002000) == 0x00002000); - } - public int getTotalStaticBloomSizeKB() { - return totalStaticBloomSizeKB_; - } - - // repeated .Coprocessor coprocessors = 15; - public static final int COPROCESSORS_FIELD_NUMBER = 15; - private java.util.List coprocessors_; - public java.util.List getCoprocessorsList() { - return coprocessors_; - } - public java.util.List - getCoprocessorsOrBuilderList() { - return coprocessors_; - } - public int getCoprocessorsCount() { - return coprocessors_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { - return coprocessors_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( - int index) { - return coprocessors_.get(index); - } - - // optional uint64 completeSequenceId = 16; - public static final int COMPLETESEQUENCEID_FIELD_NUMBER = 16; - private long completeSequenceId_; - public boolean hasCompleteSequenceId() { - return ((bitField0_ & 0x00004000) == 0x00004000); - } - public long getCompleteSequenceId() { - return completeSequenceId_; - } - - private void initFields() { - regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - stores_ = 0; - storefiles_ = 0; - storeUncompressedSizeMB_ = 0; - storefileSizeMB_ = 0; - memstoreSizeMB_ = 0; - storefileIndexSizeMB_ = 0; - readRequestsCount_ = 0L; - writeRequestsCount_ = 0L; - totalCompactingKVs_ = 0L; - currentCompactedKVs_ = 0L; - rootIndexSizeKB_ = 0; - totalStaticIndexSizeKB_ = 0; - totalStaticBloomSizeKB_ = 0; - coprocessors_ = java.util.Collections.emptyList(); - completeSequenceId_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = 
memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionSpecifier()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegionSpecifier().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getCoprocessorsCount(); i++) { - if (!getCoprocessors(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionSpecifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, stores_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt32(3, storefiles_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt32(4, storeUncompressedSizeMB_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeUInt32(5, storefileSizeMB_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeUInt32(6, memstoreSizeMB_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeUInt32(7, storefileIndexSizeMB_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeUInt64(8, readRequestsCount_); - } - if (((bitField0_ & 0x00000100) == 0x00000100)) { - output.writeUInt64(9, writeRequestsCount_); - } - if (((bitField0_ & 0x00000200) == 0x00000200)) { - output.writeUInt64(10, totalCompactingKVs_); - } - if (((bitField0_ & 0x00000400) == 0x00000400)) { - output.writeUInt64(11, currentCompactedKVs_); - } - if (((bitField0_ & 0x00000800) == 0x00000800)) { - output.writeUInt32(12, rootIndexSizeKB_); - } - if (((bitField0_ & 0x00001000) == 0x00001000)) { - output.writeUInt32(13, totalStaticIndexSizeKB_); - } - if (((bitField0_ & 0x00002000) == 0x00002000)) { - output.writeUInt32(14, totalStaticBloomSizeKB_); - } - for (int i = 0; i < coprocessors_.size(); i++) { - output.writeMessage(15, coprocessors_.get(i)); - } - if (((bitField0_ & 0x00004000) == 0x00004000)) { - output.writeUInt64(16, completeSequenceId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionSpecifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, stores_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(3, storefiles_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, storeUncompressedSizeMB_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(5, storefileSizeMB_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(6, memstoreSizeMB_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(7, storefileIndexSizeMB_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(8, readRequestsCount_); 
- } - if (((bitField0_ & 0x00000100) == 0x00000100)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(9, writeRequestsCount_); - } - if (((bitField0_ & 0x00000200) == 0x00000200)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(10, totalCompactingKVs_); - } - if (((bitField0_ & 0x00000400) == 0x00000400)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(11, currentCompactedKVs_); - } - if (((bitField0_ & 0x00000800) == 0x00000800)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(12, rootIndexSizeKB_); - } - if (((bitField0_ & 0x00001000) == 0x00001000)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(13, totalStaticIndexSizeKB_); - } - if (((bitField0_ & 0x00002000) == 0x00002000)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(14, totalStaticBloomSizeKB_); - } - for (int i = 0; i < coprocessors_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(15, coprocessors_.get(i)); - } - if (((bitField0_ & 0x00004000) == 0x00004000)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(16, completeSequenceId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) obj; - - boolean result = true; - result = result && (hasRegionSpecifier() == other.hasRegionSpecifier()); - if (hasRegionSpecifier()) { - result = result && getRegionSpecifier() - .equals(other.getRegionSpecifier()); - } - result = result && (hasStores() == other.hasStores()); - if (hasStores()) { - result = result && (getStores() - == other.getStores()); - } - result = result && (hasStorefiles() == other.hasStorefiles()); - if (hasStorefiles()) { - result = result && (getStorefiles() - == other.getStorefiles()); - } - result = result && (hasStoreUncompressedSizeMB() == other.hasStoreUncompressedSizeMB()); - if (hasStoreUncompressedSizeMB()) { - result = result && (getStoreUncompressedSizeMB() - == other.getStoreUncompressedSizeMB()); - } - result = result && (hasStorefileSizeMB() == other.hasStorefileSizeMB()); - if (hasStorefileSizeMB()) { - result = result && (getStorefileSizeMB() - == other.getStorefileSizeMB()); - } - result = result && (hasMemstoreSizeMB() == other.hasMemstoreSizeMB()); - if (hasMemstoreSizeMB()) { - result = result && (getMemstoreSizeMB() - == other.getMemstoreSizeMB()); - } - result = result && (hasStorefileIndexSizeMB() == other.hasStorefileIndexSizeMB()); - if (hasStorefileIndexSizeMB()) { - result = result && (getStorefileIndexSizeMB() - == other.getStorefileIndexSizeMB()); - } - result = result && (hasReadRequestsCount() == other.hasReadRequestsCount()); - if (hasReadRequestsCount()) { - result = result && (getReadRequestsCount() - == other.getReadRequestsCount()); - } - result = result && (hasWriteRequestsCount() == other.hasWriteRequestsCount()); - if (hasWriteRequestsCount()) { - result = result && 
(getWriteRequestsCount() - == other.getWriteRequestsCount()); - } - result = result && (hasTotalCompactingKVs() == other.hasTotalCompactingKVs()); - if (hasTotalCompactingKVs()) { - result = result && (getTotalCompactingKVs() - == other.getTotalCompactingKVs()); - } - result = result && (hasCurrentCompactedKVs() == other.hasCurrentCompactedKVs()); - if (hasCurrentCompactedKVs()) { - result = result && (getCurrentCompactedKVs() - == other.getCurrentCompactedKVs()); - } - result = result && (hasRootIndexSizeKB() == other.hasRootIndexSizeKB()); - if (hasRootIndexSizeKB()) { - result = result && (getRootIndexSizeKB() - == other.getRootIndexSizeKB()); - } - result = result && (hasTotalStaticIndexSizeKB() == other.hasTotalStaticIndexSizeKB()); - if (hasTotalStaticIndexSizeKB()) { - result = result && (getTotalStaticIndexSizeKB() - == other.getTotalStaticIndexSizeKB()); - } - result = result && (hasTotalStaticBloomSizeKB() == other.hasTotalStaticBloomSizeKB()); - if (hasTotalStaticBloomSizeKB()) { - result = result && (getTotalStaticBloomSizeKB() - == other.getTotalStaticBloomSizeKB()); - } - result = result && getCoprocessorsList() - .equals(other.getCoprocessorsList()); - result = result && (hasCompleteSequenceId() == other.hasCompleteSequenceId()); - if (hasCompleteSequenceId()) { - result = result && (getCompleteSequenceId() - == other.getCompleteSequenceId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionSpecifier()) { - hash = (37 * hash) + REGIONSPECIFIER_FIELD_NUMBER; - hash = (53 * hash) + getRegionSpecifier().hashCode(); - } - if (hasStores()) { - hash = (37 * hash) + STORES_FIELD_NUMBER; - hash = (53 * hash) + getStores(); - } - if (hasStorefiles()) { - hash = (37 * hash) + STOREFILES_FIELD_NUMBER; - hash = (53 * hash) + getStorefiles(); - } - if (hasStoreUncompressedSizeMB()) { - hash = (37 * hash) + STOREUNCOMPRESSEDSIZEMB_FIELD_NUMBER; - hash = (53 * hash) + getStoreUncompressedSizeMB(); - } - if (hasStorefileSizeMB()) { - hash = (37 * hash) + STOREFILESIZEMB_FIELD_NUMBER; - hash = (53 * hash) + getStorefileSizeMB(); - } - if (hasMemstoreSizeMB()) { - hash = (37 * hash) + MEMSTORESIZEMB_FIELD_NUMBER; - hash = (53 * hash) + getMemstoreSizeMB(); - } - if (hasStorefileIndexSizeMB()) { - hash = (37 * hash) + STOREFILEINDEXSIZEMB_FIELD_NUMBER; - hash = (53 * hash) + getStorefileIndexSizeMB(); - } - if (hasReadRequestsCount()) { - hash = (37 * hash) + READREQUESTSCOUNT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getReadRequestsCount()); - } - if (hasWriteRequestsCount()) { - hash = (37 * hash) + WRITEREQUESTSCOUNT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getWriteRequestsCount()); - } - if (hasTotalCompactingKVs()) { - hash = (37 * hash) + TOTALCOMPACTINGKVS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTotalCompactingKVs()); - } - if (hasCurrentCompactedKVs()) { - hash = (37 * hash) + CURRENTCOMPACTEDKVS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCurrentCompactedKVs()); - } - if (hasRootIndexSizeKB()) { - hash = (37 * hash) + ROOTINDEXSIZEKB_FIELD_NUMBER; - hash = (53 * hash) + getRootIndexSizeKB(); - } - if (hasTotalStaticIndexSizeKB()) { - hash = (37 * hash) + TOTALSTATICINDEXSIZEKB_FIELD_NUMBER; - hash = (53 * hash) + getTotalStaticIndexSizeKB(); - } - if (hasTotalStaticBloomSizeKB()) { - hash = (37 * hash) + TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER; - hash = (53 * hash) + 
getTotalStaticBloomSizeKB(); - } - if (getCoprocessorsCount() > 0) { - hash = (37 * hash) + COPROCESSORS_FIELD_NUMBER; - hash = (53 * hash) + getCoprocessorsList().hashCode(); - } - if (hasCompleteSequenceId()) { - hash = (37 * hash) + COMPLETESEQUENCEID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCompleteSequenceId()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad prototype) { - return newBuilder().mergeFrom(prototype); - } 
- public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionLoad_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionSpecifierFieldBuilder(); - getCoprocessorsFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionSpecifierBuilder_ == null) { - regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionSpecifierBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - stores_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - storefiles_ = 0; - bitField0_ = (bitField0_ & ~0x00000004); - storeUncompressedSizeMB_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - storefileSizeMB_ = 0; - bitField0_ = (bitField0_ & ~0x00000010); - memstoreSizeMB_ = 0; - bitField0_ = (bitField0_ & ~0x00000020); - storefileIndexSizeMB_ = 0; - bitField0_ = (bitField0_ & ~0x00000040); - readRequestsCount_ = 0L; - bitField0_ = (bitField0_ & ~0x00000080); - writeRequestsCount_ = 0L; - bitField0_ = (bitField0_ & ~0x00000100); - totalCompactingKVs_ = 0L; - bitField0_ = (bitField0_ & ~0x00000200); - currentCompactedKVs_ = 0L; - bitField0_ = (bitField0_ & ~0x00000400); - rootIndexSizeKB_ = 0; - bitField0_ = (bitField0_ & ~0x00000800); - totalStaticIndexSizeKB_ = 0; - bitField0_ = (bitField0_ & ~0x00001000); - totalStaticBloomSizeKB_ = 0; - bitField0_ = (bitField0_ & ~0x00002000); - if (coprocessorsBuilder_ == null) { - coprocessors_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00004000); - } else { - coprocessorsBuilder_.clear(); - } - completeSequenceId_ = 0L; - bitField0_ = (bitField0_ & ~0x00008000); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionSpecifierBuilder_ == null) { - result.regionSpecifier_ = regionSpecifier_; - } else { - result.regionSpecifier_ = regionSpecifierBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.stores_ = stores_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.storefiles_ = storefiles_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.storeUncompressedSizeMB_ = storeUncompressedSizeMB_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.storefileSizeMB_ = storefileSizeMB_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.memstoreSizeMB_ = memstoreSizeMB_; - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000040; - } - result.storefileIndexSizeMB_ = storefileIndexSizeMB_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000080; - } - result.readRequestsCount_ = readRequestsCount_; - if (((from_bitField0_ & 0x00000100) == 0x00000100)) { - to_bitField0_ |= 0x00000100; - } - result.writeRequestsCount_ = writeRequestsCount_; - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000200; - } - result.totalCompactingKVs_ = totalCompactingKVs_; - if (((from_bitField0_ & 0x00000400) == 0x00000400)) { - to_bitField0_ |= 0x00000400; - } - result.currentCompactedKVs_ = currentCompactedKVs_; - if (((from_bitField0_ & 0x00000800) == 0x00000800)) { - to_bitField0_ |= 0x00000800; - } - result.rootIndexSizeKB_ = rootIndexSizeKB_; - if (((from_bitField0_ & 0x00001000) == 0x00001000)) { - to_bitField0_ |= 0x00001000; - } - result.totalStaticIndexSizeKB_ = totalStaticIndexSizeKB_; - if (((from_bitField0_ & 0x00002000) == 0x00002000)) { - to_bitField0_ |= 0x00002000; - } - result.totalStaticBloomSizeKB_ = totalStaticBloomSizeKB_; - if (coprocessorsBuilder_ == null) { - if (((bitField0_ & 0x00004000) == 0x00004000)) { - coprocessors_ = java.util.Collections.unmodifiableList(coprocessors_); - bitField0_ = (bitField0_ & ~0x00004000); - } - result.coprocessors_ = coprocessors_; - } else { - result.coprocessors_ = coprocessorsBuilder_.build(); - } - if (((from_bitField0_ & 0x00008000) == 0x00008000)) { - to_bitField0_ |= 0x00004000; - } - result.completeSequenceId_ = completeSequenceId_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance()) return this; - if (other.hasRegionSpecifier()) { - mergeRegionSpecifier(other.getRegionSpecifier()); - } - if (other.hasStores()) { - setStores(other.getStores()); - } - if (other.hasStorefiles()) { - setStorefiles(other.getStorefiles()); - } - if (other.hasStoreUncompressedSizeMB()) { - setStoreUncompressedSizeMB(other.getStoreUncompressedSizeMB()); - } - if (other.hasStorefileSizeMB()) { - setStorefileSizeMB(other.getStorefileSizeMB()); - } - if (other.hasMemstoreSizeMB()) { - setMemstoreSizeMB(other.getMemstoreSizeMB()); - } - if (other.hasStorefileIndexSizeMB()) { - setStorefileIndexSizeMB(other.getStorefileIndexSizeMB()); - } - if (other.hasReadRequestsCount()) { - setReadRequestsCount(other.getReadRequestsCount()); - } - if (other.hasWriteRequestsCount()) { - setWriteRequestsCount(other.getWriteRequestsCount()); - } - if (other.hasTotalCompactingKVs()) { - setTotalCompactingKVs(other.getTotalCompactingKVs()); - } - if (other.hasCurrentCompactedKVs()) { - setCurrentCompactedKVs(other.getCurrentCompactedKVs()); - } - if (other.hasRootIndexSizeKB()) { - setRootIndexSizeKB(other.getRootIndexSizeKB()); - } - if (other.hasTotalStaticIndexSizeKB()) { - setTotalStaticIndexSizeKB(other.getTotalStaticIndexSizeKB()); - } - if (other.hasTotalStaticBloomSizeKB()) { - setTotalStaticBloomSizeKB(other.getTotalStaticBloomSizeKB()); - } - if (coprocessorsBuilder_ == null) { - if (!other.coprocessors_.isEmpty()) { - if (coprocessors_.isEmpty()) { - coprocessors_ = other.coprocessors_; - bitField0_ = (bitField0_ & ~0x00004000); - } else { - ensureCoprocessorsIsMutable(); - coprocessors_.addAll(other.coprocessors_); - } - onChanged(); - } - } else { - if (!other.coprocessors_.isEmpty()) { - if (coprocessorsBuilder_.isEmpty()) { - coprocessorsBuilder_.dispose(); - coprocessorsBuilder_ = null; - coprocessors_ = other.coprocessors_; - bitField0_ = (bitField0_ & ~0x00004000); - coprocessorsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getCoprocessorsFieldBuilder() : null; - } else { - coprocessorsBuilder_.addAllMessages(other.coprocessors_); - } - } - } - if (other.hasCompleteSequenceId()) { - setCompleteSequenceId(other.getCompleteSequenceId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionSpecifier()) { - - return false; - } - if (!getRegionSpecifier().isInitialized()) { - - return false; - } - for (int i = 0; i < getCoprocessorsCount(); i++) { - if (!getCoprocessors(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegionSpecifier()) { - subBuilder.mergeFrom(getRegionSpecifier()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionSpecifier(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - stores_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - storefiles_ = input.readUInt32(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - storeUncompressedSizeMB_ = input.readUInt32(); - break; - } - case 40: { - bitField0_ |= 0x00000010; - storefileSizeMB_ = input.readUInt32(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - memstoreSizeMB_ = input.readUInt32(); - break; - } - case 56: { - bitField0_ |= 0x00000040; - storefileIndexSizeMB_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - readRequestsCount_ = input.readUInt64(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - writeRequestsCount_ = input.readUInt64(); - break; - } - case 80: { - bitField0_ |= 0x00000200; - totalCompactingKVs_ = input.readUInt64(); - break; - } - case 88: { - bitField0_ |= 0x00000400; - currentCompactedKVs_ = input.readUInt64(); - break; - } - case 96: { - bitField0_ |= 0x00000800; - rootIndexSizeKB_ = input.readUInt32(); - break; - } - case 104: { - bitField0_ |= 0x00001000; - totalStaticIndexSizeKB_ = input.readUInt32(); - break; - } - case 112: { - bitField0_ |= 0x00002000; - totalStaticBloomSizeKB_ = input.readUInt32(); - break; - } - case 122: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addCoprocessors(subBuilder.buildPartial()); - break; - } - case 128: { - bitField0_ |= 0x00008000; - completeSequenceId_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier regionSpecifier = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionSpecifierBuilder_; - public boolean hasRegionSpecifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { - if (regionSpecifierBuilder_ == null) { - return regionSpecifier_; - } else { - return regionSpecifierBuilder_.getMessage(); - } - } - public Builder setRegionSpecifier(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionSpecifierBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - regionSpecifier_ = value; - onChanged(); - } else { - regionSpecifierBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegionSpecifier( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionSpecifierBuilder_ == null) { - regionSpecifier_ = builderForValue.build(); - onChanged(); - } else { - regionSpecifierBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegionSpecifier(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionSpecifierBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - regionSpecifier_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - regionSpecifier_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionSpecifier_).mergeFrom(value).buildPartial(); - } else { - regionSpecifier_ = value; - } - onChanged(); - } else { - regionSpecifierBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegionSpecifier() { - if (regionSpecifierBuilder_ == null) { - regionSpecifier_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionSpecifierBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionSpecifierBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionSpecifierFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { - if (regionSpecifierBuilder_ != null) { - return regionSpecifierBuilder_.getMessageOrBuilder(); - } else { - return regionSpecifier_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionSpecifierFieldBuilder() { - if (regionSpecifierBuilder_ == null) { - regionSpecifierBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - regionSpecifier_, - getParentForChildren(), - isClean()); - regionSpecifier_ = null; - } - return regionSpecifierBuilder_; - } - - // optional uint32 stores = 2; - private int stores_ ; - public boolean hasStores() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getStores() { - return stores_; - } - public Builder setStores(int value) { - bitField0_ |= 0x00000002; - stores_ = value; - onChanged(); - return this; - } - public Builder clearStores() { - bitField0_ = (bitField0_ & ~0x00000002); - stores_ = 0; - onChanged(); - return this; - } - - // optional uint32 storefiles = 3; - private int storefiles_ ; - public boolean hasStorefiles() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getStorefiles() { - return storefiles_; - } - public Builder setStorefiles(int value) { - bitField0_ |= 0x00000004; - storefiles_ = value; - onChanged(); - return this; - } - public Builder clearStorefiles() { - bitField0_ = (bitField0_ & ~0x00000004); - storefiles_ = 0; - onChanged(); - return this; - } - - // optional uint32 storeUncompressedSizeMB = 4; - private int storeUncompressedSizeMB_ ; - public boolean hasStoreUncompressedSizeMB() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getStoreUncompressedSizeMB() { - return storeUncompressedSizeMB_; - } - public Builder setStoreUncompressedSizeMB(int value) { - bitField0_ |= 0x00000008; - storeUncompressedSizeMB_ = value; - onChanged(); - return this; - } - public Builder clearStoreUncompressedSizeMB() { - bitField0_ = (bitField0_ & ~0x00000008); - storeUncompressedSizeMB_ = 0; - onChanged(); - return this; - } - - // optional uint32 storefileSizeMB = 5; - private int storefileSizeMB_ ; - public boolean hasStorefileSizeMB() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public int getStorefileSizeMB() { - return storefileSizeMB_; - } - public Builder setStorefileSizeMB(int value) { - bitField0_ |= 0x00000010; - storefileSizeMB_ = value; - onChanged(); - return this; - } - public Builder clearStorefileSizeMB() { - bitField0_ = (bitField0_ & ~0x00000010); - storefileSizeMB_ = 0; - onChanged(); - return this; - } - - // optional uint32 memstoreSizeMB = 6; - private int memstoreSizeMB_ ; - public boolean hasMemstoreSizeMB() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public int getMemstoreSizeMB() { - return memstoreSizeMB_; - } - public Builder setMemstoreSizeMB(int value) { - bitField0_ |= 0x00000020; - memstoreSizeMB_ = value; - onChanged(); - return this; - } - public Builder clearMemstoreSizeMB() { - bitField0_ = (bitField0_ & ~0x00000020); - memstoreSizeMB_ = 0; - onChanged(); - return this; - } - - // optional uint32 storefileIndexSizeMB = 7; - private int storefileIndexSizeMB_ ; - public boolean hasStorefileIndexSizeMB() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getStorefileIndexSizeMB() { - return storefileIndexSizeMB_; - } - public Builder setStorefileIndexSizeMB(int value) { - bitField0_ |= 0x00000040; - storefileIndexSizeMB_ = value; - onChanged(); - return this; - } - public Builder clearStorefileIndexSizeMB() { - bitField0_ = (bitField0_ & ~0x00000040); - storefileIndexSizeMB_ = 0; - onChanged(); - return this; - } - - // optional uint64 readRequestsCount = 8; - private long readRequestsCount_ ; - public boolean hasReadRequestsCount() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public long getReadRequestsCount() { - return 
readRequestsCount_; - } - public Builder setReadRequestsCount(long value) { - bitField0_ |= 0x00000080; - readRequestsCount_ = value; - onChanged(); - return this; - } - public Builder clearReadRequestsCount() { - bitField0_ = (bitField0_ & ~0x00000080); - readRequestsCount_ = 0L; - onChanged(); - return this; - } - - // optional uint64 writeRequestsCount = 9; - private long writeRequestsCount_ ; - public boolean hasWriteRequestsCount() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public long getWriteRequestsCount() { - return writeRequestsCount_; - } - public Builder setWriteRequestsCount(long value) { - bitField0_ |= 0x00000100; - writeRequestsCount_ = value; - onChanged(); - return this; - } - public Builder clearWriteRequestsCount() { - bitField0_ = (bitField0_ & ~0x00000100); - writeRequestsCount_ = 0L; - onChanged(); - return this; - } - - // optional uint64 totalCompactingKVs = 10; - private long totalCompactingKVs_ ; - public boolean hasTotalCompactingKVs() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - public long getTotalCompactingKVs() { - return totalCompactingKVs_; - } - public Builder setTotalCompactingKVs(long value) { - bitField0_ |= 0x00000200; - totalCompactingKVs_ = value; - onChanged(); - return this; - } - public Builder clearTotalCompactingKVs() { - bitField0_ = (bitField0_ & ~0x00000200); - totalCompactingKVs_ = 0L; - onChanged(); - return this; - } - - // optional uint64 currentCompactedKVs = 11; - private long currentCompactedKVs_ ; - public boolean hasCurrentCompactedKVs() { - return ((bitField0_ & 0x00000400) == 0x00000400); - } - public long getCurrentCompactedKVs() { - return currentCompactedKVs_; - } - public Builder setCurrentCompactedKVs(long value) { - bitField0_ |= 0x00000400; - currentCompactedKVs_ = value; - onChanged(); - return this; - } - public Builder clearCurrentCompactedKVs() { - bitField0_ = (bitField0_ & ~0x00000400); - currentCompactedKVs_ = 0L; - onChanged(); - return this; - } - - // optional uint32 rootIndexSizeKB = 12; - private int rootIndexSizeKB_ ; - public boolean hasRootIndexSizeKB() { - return ((bitField0_ & 0x00000800) == 0x00000800); - } - public int getRootIndexSizeKB() { - return rootIndexSizeKB_; - } - public Builder setRootIndexSizeKB(int value) { - bitField0_ |= 0x00000800; - rootIndexSizeKB_ = value; - onChanged(); - return this; - } - public Builder clearRootIndexSizeKB() { - bitField0_ = (bitField0_ & ~0x00000800); - rootIndexSizeKB_ = 0; - onChanged(); - return this; - } - - // optional uint32 totalStaticIndexSizeKB = 13; - private int totalStaticIndexSizeKB_ ; - public boolean hasTotalStaticIndexSizeKB() { - return ((bitField0_ & 0x00001000) == 0x00001000); - } - public int getTotalStaticIndexSizeKB() { - return totalStaticIndexSizeKB_; - } - public Builder setTotalStaticIndexSizeKB(int value) { - bitField0_ |= 0x00001000; - totalStaticIndexSizeKB_ = value; - onChanged(); - return this; - } - public Builder clearTotalStaticIndexSizeKB() { - bitField0_ = (bitField0_ & ~0x00001000); - totalStaticIndexSizeKB_ = 0; - onChanged(); - return this; - } - - // optional uint32 totalStaticBloomSizeKB = 14; - private int totalStaticBloomSizeKB_ ; - public boolean hasTotalStaticBloomSizeKB() { - return ((bitField0_ & 0x00002000) == 0x00002000); - } - public int getTotalStaticBloomSizeKB() { - return totalStaticBloomSizeKB_; - } - public Builder setTotalStaticBloomSizeKB(int value) { - bitField0_ |= 0x00002000; - totalStaticBloomSizeKB_ = value; - onChanged(); - return this; - } - public Builder 
clearTotalStaticBloomSizeKB() { - bitField0_ = (bitField0_ & ~0x00002000); - totalStaticBloomSizeKB_ = 0; - onChanged(); - return this; - } - - // repeated .Coprocessor coprocessors = 15; - private java.util.List coprocessors_ = - java.util.Collections.emptyList(); - private void ensureCoprocessorsIsMutable() { - if (!((bitField0_ & 0x00004000) == 0x00004000)) { - coprocessors_ = new java.util.ArrayList(coprocessors_); - bitField0_ |= 0x00004000; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_; - - public java.util.List getCoprocessorsList() { - if (coprocessorsBuilder_ == null) { - return java.util.Collections.unmodifiableList(coprocessors_); - } else { - return coprocessorsBuilder_.getMessageList(); - } - } - public int getCoprocessorsCount() { - if (coprocessorsBuilder_ == null) { - return coprocessors_.size(); - } else { - return coprocessorsBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { - if (coprocessorsBuilder_ == null) { - return coprocessors_.get(index); - } else { - return coprocessorsBuilder_.getMessage(index); - } - } - public Builder setCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { - if (coprocessorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureCoprocessorsIsMutable(); - coprocessors_.set(index, value); - onChanged(); - } else { - coprocessorsBuilder_.setMessage(index, value); - } - return this; - } - public Builder setCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { - if (coprocessorsBuilder_ == null) { - ensureCoprocessorsIsMutable(); - coprocessors_.set(index, builderForValue.build()); - onChanged(); - } else { - coprocessorsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { - if (coprocessorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureCoprocessorsIsMutable(); - coprocessors_.add(value); - onChanged(); - } else { - coprocessorsBuilder_.addMessage(value); - } - return this; - } - public Builder addCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) { - if (coprocessorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureCoprocessorsIsMutable(); - coprocessors_.add(index, value); - onChanged(); - } else { - coprocessorsBuilder_.addMessage(index, value); - } - return this; - } - public Builder addCoprocessors( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { - if (coprocessorsBuilder_ == null) { - ensureCoprocessorsIsMutable(); - coprocessors_.add(builderForValue.build()); - onChanged(); - } else { - coprocessorsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addCoprocessors( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { - if (coprocessorsBuilder_ == null) { - ensureCoprocessorsIsMutable(); - coprocessors_.add(index, builderForValue.build()); - onChanged(); - } 
else { - coprocessorsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllCoprocessors( - java.lang.Iterable values) { - if (coprocessorsBuilder_ == null) { - ensureCoprocessorsIsMutable(); - super.addAll(values, coprocessors_); - onChanged(); - } else { - coprocessorsBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearCoprocessors() { - if (coprocessorsBuilder_ == null) { - coprocessors_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00004000); - onChanged(); - } else { - coprocessorsBuilder_.clear(); - } - return this; - } - public Builder removeCoprocessors(int index) { - if (coprocessorsBuilder_ == null) { - ensureCoprocessorsIsMutable(); - coprocessors_.remove(index); - onChanged(); - } else { - coprocessorsBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder( - int index) { - return getCoprocessorsFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( - int index) { - if (coprocessorsBuilder_ == null) { - return coprocessors_.get(index); } else { - return coprocessorsBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getCoprocessorsOrBuilderList() { - if (coprocessorsBuilder_ != null) { - return coprocessorsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(coprocessors_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() { - return getCoprocessorsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder( - int index) { - return getCoprocessorsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); - } - public java.util.List - getCoprocessorsBuilderList() { - return getCoprocessorsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> - getCoprocessorsFieldBuilder() { - if (coprocessorsBuilder_ == null) { - coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( - coprocessors_, - ((bitField0_ & 0x00004000) == 0x00004000), - getParentForChildren(), - isClean()); - coprocessors_ = null; - } - return coprocessorsBuilder_; - } - - // optional uint64 completeSequenceId = 16; - private long completeSequenceId_ ; - public boolean hasCompleteSequenceId() { - return ((bitField0_ & 0x00008000) == 0x00008000); - } - public long getCompleteSequenceId() { - return completeSequenceId_; - } - public Builder setCompleteSequenceId(long value) { - bitField0_ |= 0x00008000; - completeSequenceId_ = value; - onChanged(); - return this; - } - public Builder clearCompleteSequenceId() { - bitField0_ = (bitField0_ & ~0x00008000); - completeSequenceId_ = 0L; - onChanged(); 
- return this; - } - - // @@protoc_insertion_point(builder_scope:RegionLoad) - } - - static { - defaultInstance = new RegionLoad(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionLoad) - } - - public interface ServerLoadOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional uint32 numberOfRequests = 1; - boolean hasNumberOfRequests(); - int getNumberOfRequests(); - - // optional uint32 totalNumberOfRequests = 2; - boolean hasTotalNumberOfRequests(); - int getTotalNumberOfRequests(); - - // optional uint32 usedHeapMB = 3; - boolean hasUsedHeapMB(); - int getUsedHeapMB(); - - // optional uint32 maxHeapMB = 4; - boolean hasMaxHeapMB(); - int getMaxHeapMB(); - - // repeated .RegionLoad regionLoads = 5; - java.util.List - getRegionLoadsList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index); - int getRegionLoadsCount(); - java.util.List - getRegionLoadsOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( - int index); - - // repeated .Coprocessor coprocessors = 6; - java.util.List - getCoprocessorsList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index); - int getCoprocessorsCount(); - java.util.List - getCoprocessorsOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( - int index); - - // optional uint64 reportStartTime = 7; - boolean hasReportStartTime(); - long getReportStartTime(); - - // optional uint64 reportEndTime = 8; - boolean hasReportEndTime(); - long getReportEndTime(); - - // optional uint32 infoServerPort = 9; - boolean hasInfoServerPort(); - int getInfoServerPort(); - } - public static final class ServerLoad extends - com.google.protobuf.GeneratedMessage - implements ServerLoadOrBuilder { - // Use ServerLoad.newBuilder() to construct. 
- private ServerLoad(Builder builder) { - super(builder); - } - private ServerLoad(boolean noInit) {} - - private static final ServerLoad defaultInstance; - public static ServerLoad getDefaultInstance() { - return defaultInstance; - } - - public ServerLoad getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable; - } - - private int bitField0_; - // optional uint32 numberOfRequests = 1; - public static final int NUMBEROFREQUESTS_FIELD_NUMBER = 1; - private int numberOfRequests_; - public boolean hasNumberOfRequests() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getNumberOfRequests() { - return numberOfRequests_; - } - - // optional uint32 totalNumberOfRequests = 2; - public static final int TOTALNUMBEROFREQUESTS_FIELD_NUMBER = 2; - private int totalNumberOfRequests_; - public boolean hasTotalNumberOfRequests() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTotalNumberOfRequests() { - return totalNumberOfRequests_; - } - - // optional uint32 usedHeapMB = 3; - public static final int USEDHEAPMB_FIELD_NUMBER = 3; - private int usedHeapMB_; - public boolean hasUsedHeapMB() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getUsedHeapMB() { - return usedHeapMB_; - } - - // optional uint32 maxHeapMB = 4; - public static final int MAXHEAPMB_FIELD_NUMBER = 4; - private int maxHeapMB_; - public boolean hasMaxHeapMB() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getMaxHeapMB() { - return maxHeapMB_; - } - - // repeated .RegionLoad regionLoads = 5; - public static final int REGIONLOADS_FIELD_NUMBER = 5; - private java.util.List regionLoads_; - public java.util.List getRegionLoadsList() { - return regionLoads_; - } - public java.util.List - getRegionLoadsOrBuilderList() { - return regionLoads_; - } - public int getRegionLoadsCount() { - return regionLoads_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index) { - return regionLoads_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( - int index) { - return regionLoads_.get(index); - } - - // repeated .Coprocessor coprocessors = 6; - public static final int COPROCESSORS_FIELD_NUMBER = 6; - private java.util.List coprocessors_; - public java.util.List getCoprocessorsList() { - return coprocessors_; - } - public java.util.List - getCoprocessorsOrBuilderList() { - return coprocessors_; - } - public int getCoprocessorsCount() { - return coprocessors_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { - return coprocessors_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( - int index) { - return coprocessors_.get(index); - } - - // optional uint64 reportStartTime = 7; - public static final int REPORTSTARTTIME_FIELD_NUMBER = 7; - private long reportStartTime_; - public boolean hasReportStartTime() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public long 
getReportStartTime() {
-     return reportStartTime_;
-   }
-
-   // optional uint64 reportEndTime = 8;
-   public static final int REPORTENDTIME_FIELD_NUMBER = 8;
-   private long reportEndTime_;
-   public boolean hasReportEndTime() {
-     return ((bitField0_ & 0x00000020) == 0x00000020);
-   }
-   public long getReportEndTime() {
-     return reportEndTime_;
-   }
-
-   // optional uint32 infoServerPort = 9;
-   public static final int INFOSERVERPORT_FIELD_NUMBER = 9;
-   private int infoServerPort_;
-   public boolean hasInfoServerPort() {
-     return ((bitField0_ & 0x00000040) == 0x00000040);
-   }
-   public int getInfoServerPort() {
-     return infoServerPort_;
-   }
-
-   private void initFields() {
-     numberOfRequests_ = 0;
-     totalNumberOfRequests_ = 0;
-     usedHeapMB_ = 0;
-     maxHeapMB_ = 0;
-     regionLoads_ = java.util.Collections.emptyList();
-     coprocessors_ = java.util.Collections.emptyList();
-     reportStartTime_ = 0L;
-     reportEndTime_ = 0L;
-     infoServerPort_ = 0;
-   }
-   private byte memoizedIsInitialized = -1;
-   public final boolean isInitialized() {
-     byte isInitialized = memoizedIsInitialized;
-     if (isInitialized != -1) return isInitialized == 1;
-
-     for (int i = 0; i < getRegionLoadsCount(); i++) {
-       if (!getRegionLoads(i).isInitialized()) {
-         memoizedIsInitialized = 0;
-         return false;
-       }
-     }
-     for (int i = 0; i < getCoprocessorsCount(); i++) {
-       if (!getCoprocessors(i).isInitialized()) {
-         memoizedIsInitialized = 0;
-         return false;
-       }
-     }
-     memoizedIsInitialized = 1;
-     return true;
-   }
-
-   public void writeTo(com.google.protobuf.CodedOutputStream output)
-       throws java.io.IOException {
-     getSerializedSize();
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       output.writeUInt32(1, numberOfRequests_);
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       output.writeUInt32(2, totalNumberOfRequests_);
-     }
-     if (((bitField0_ & 0x00000004) == 0x00000004)) {
-       output.writeUInt32(3, usedHeapMB_);
-     }
-     if (((bitField0_ & 0x00000008) == 0x00000008)) {
-       output.writeUInt32(4, maxHeapMB_);
-     }
-     for (int i = 0; i < regionLoads_.size(); i++) {
-       output.writeMessage(5, regionLoads_.get(i));
-     }
-     for (int i = 0; i < coprocessors_.size(); i++) {
-       output.writeMessage(6, coprocessors_.get(i));
-     }
-     if (((bitField0_ & 0x00000010) == 0x00000010)) {
-       output.writeUInt64(7, reportStartTime_);
-     }
-     if (((bitField0_ & 0x00000020) == 0x00000020)) {
-       output.writeUInt64(8, reportEndTime_);
-     }
-     if (((bitField0_ & 0x00000040) == 0x00000040)) {
-       output.writeUInt32(9, infoServerPort_);
-     }
-     getUnknownFields().writeTo(output);
-   }
-
-   private int memoizedSerializedSize = -1;
-   public int getSerializedSize() {
-     int size = memoizedSerializedSize;
-     if (size != -1) return size;
-
-     size = 0;
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt32Size(1, numberOfRequests_);
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt32Size(2, totalNumberOfRequests_);
-     }
-     if (((bitField0_ & 0x00000004) == 0x00000004)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt32Size(3, usedHeapMB_);
-     }
-     if (((bitField0_ & 0x00000008) == 0x00000008)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt32Size(4, maxHeapMB_);
-     }
-     for (int i = 0; i < regionLoads_.size(); i++) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeMessageSize(5, regionLoads_.get(i));
-     }
-     for (int i = 0; i < coprocessors_.size(); i++) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeMessageSize(6, coprocessors_.get(i));
-     }
-     if (((bitField0_ & 0x00000010) == 0x00000010)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt64Size(7, reportStartTime_);
-     }
-     if (((bitField0_ & 0x00000020) == 0x00000020)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt64Size(8, reportEndTime_);
-     }
-     if (((bitField0_ & 0x00000040) == 0x00000040)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt32Size(9, infoServerPort_);
-     }
-     size += getUnknownFields().getSerializedSize();
-     memoizedSerializedSize = size;
-     return size;
-   }
-
-   private static final long serialVersionUID = 0L;
-   @java.lang.Override
-   protected java.lang.Object writeReplace()
-       throws java.io.ObjectStreamException {
-     return super.writeReplace();
-   }
-
-   @java.lang.Override
-   public boolean equals(final java.lang.Object obj) {
-     if (obj == this) {
-       return true;
-     }
-     if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad)) {
-       return super.equals(obj);
-     }
-     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) obj;
-
-     boolean result = true;
-     result = result && (hasNumberOfRequests() == other.hasNumberOfRequests());
-     if (hasNumberOfRequests()) {
-       result = result && (getNumberOfRequests()
-           == other.getNumberOfRequests());
-     }
-     result = result && (hasTotalNumberOfRequests() == other.hasTotalNumberOfRequests());
-     if (hasTotalNumberOfRequests()) {
-       result = result && (getTotalNumberOfRequests()
-           == other.getTotalNumberOfRequests());
-     }
-     result = result && (hasUsedHeapMB() == other.hasUsedHeapMB());
-     if (hasUsedHeapMB()) {
-       result = result && (getUsedHeapMB()
-           == other.getUsedHeapMB());
-     }
-     result = result && (hasMaxHeapMB() == other.hasMaxHeapMB());
-     if (hasMaxHeapMB()) {
-       result = result && (getMaxHeapMB()
-           == other.getMaxHeapMB());
-     }
-     result = result && getRegionLoadsList()
-         .equals(other.getRegionLoadsList());
-     result = result && getCoprocessorsList()
-         .equals(other.getCoprocessorsList());
-     result = result && (hasReportStartTime() == other.hasReportStartTime());
-     if (hasReportStartTime()) {
-       result = result && (getReportStartTime()
-           == other.getReportStartTime());
-     }
-     result = result && (hasReportEndTime() == other.hasReportEndTime());
-     if (hasReportEndTime()) {
-       result = result && (getReportEndTime()
-           == other.getReportEndTime());
-     }
-     result = result && (hasInfoServerPort() == other.hasInfoServerPort());
-     if (hasInfoServerPort()) {
-       result = result && (getInfoServerPort()
-           == other.getInfoServerPort());
-     }
-     result = result &&
-         getUnknownFields().equals(other.getUnknownFields());
-     return result;
-   }
-
-   @java.lang.Override
-   public int hashCode() {
-     int hash = 41;
-     hash = (19 * hash) + getDescriptorForType().hashCode();
-     if (hasNumberOfRequests()) {
-       hash = (37 * hash) + NUMBEROFREQUESTS_FIELD_NUMBER;
-       hash = (53 * hash) + getNumberOfRequests();
-     }
-     if (hasTotalNumberOfRequests()) {
-       hash = (37 * hash) + TOTALNUMBEROFREQUESTS_FIELD_NUMBER;
-       hash = (53 * hash) + getTotalNumberOfRequests();
-     }
-     if (hasUsedHeapMB()) {
-       hash = (37 * hash) + USEDHEAPMB_FIELD_NUMBER;
-       hash = (53 * hash) + getUsedHeapMB();
-     }
-     if (hasMaxHeapMB()) {
-       hash = (37 * hash) + MAXHEAPMB_FIELD_NUMBER;
-       hash = (53 * hash) + getMaxHeapMB();
-     }
-     if (getRegionLoadsCount() > 0) {
-       hash = (37 * hash) + REGIONLOADS_FIELD_NUMBER;
-       hash = (53 * hash) + getRegionLoadsList().hashCode();
-     }
-     if (getCoprocessorsCount() > 0) {
-       hash = (37 * hash) + COPROCESSORS_FIELD_NUMBER;
-       hash = (53 * hash) + getCoprocessorsList().hashCode();
-     }
-     if (hasReportStartTime()) {
-       hash = (37 * hash) + REPORTSTARTTIME_FIELD_NUMBER;
-       hash = (53 * hash) + hashLong(getReportStartTime());
-     }
-     if (hasReportEndTime()) {
-       hash = (37 * hash) + REPORTENDTIME_FIELD_NUMBER;
-       hash = (53 * hash) + hashLong(getReportEndTime());
-     }
-     if (hasInfoServerPort()) {
-       hash = (37 * hash) + INFOSERVERPORT_FIELD_NUMBER;
-       hash = (53 * hash) + getInfoServerPort();
-     }
-     hash = (29 * hash) + getUnknownFields().hashCode();
-     return hash;
-   }
-
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(
-       com.google.protobuf.ByteString data)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(
-       com.google.protobuf.ByteString data,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(byte[] data)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(
-       byte[] data,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(java.io.InputStream input)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(
-       java.io.InputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseDelimitedFrom(java.io.InputStream input)
-       throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseDelimitedFrom(
-       java.io.InputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(
-       com.google.protobuf.CodedInputStream input)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad parseFrom(
-       com.google.protobuf.CodedInputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
-   }
-
-   public static Builder newBuilder() { return Builder.create(); }
-   public Builder newBuilderForType() { return newBuilder(); }
-   public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad prototype) {
-     return newBuilder().mergeFrom(prototype);
-   }
-   public Builder toBuilder() { return newBuilder(this); }
-
-   @java.lang.Override
-   protected Builder newBuilderForType(
-       com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-     Builder builder = new Builder(parent);
-     return builder;
-   }
-   public static final class Builder extends
-       com.google.protobuf.GeneratedMessage.Builder<Builder>
-      implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder {
-     public static final com.google.protobuf.Descriptors.Descriptor
-         getDescriptor() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_descriptor;
-     }
-
-     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-         internalGetFieldAccessorTable() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerLoad_fieldAccessorTable;
-     }
-
-     // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder()
-     private Builder() {
-       maybeForceBuilderInitialization();
-     }
-
-     private Builder(BuilderParent parent) {
-       super(parent);
-       maybeForceBuilderInitialization();
-     }
-     private void maybeForceBuilderInitialization() {
-       if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-         getRegionLoadsFieldBuilder();
-         getCoprocessorsFieldBuilder();
-       }
-     }
-     private static Builder create() {
-       return new Builder();
-     }
-
-     public Builder clear() {
-       super.clear();
-       numberOfRequests_ = 0;
-       bitField0_ = (bitField0_ & ~0x00000001);
-       totalNumberOfRequests_ = 0;
-       bitField0_ = (bitField0_ & ~0x00000002);
-       usedHeapMB_ = 0;
-       bitField0_ = (bitField0_ & ~0x00000004);
-       maxHeapMB_ = 0;
-       bitField0_ = (bitField0_ & ~0x00000008);
-       if (regionLoadsBuilder_ == null) {
-         regionLoads_ = java.util.Collections.emptyList();
-         bitField0_ = (bitField0_ & ~0x00000010);
-       } else {
-         regionLoadsBuilder_.clear();
-       }
-       if (coprocessorsBuilder_ == null) {
-         coprocessors_ = java.util.Collections.emptyList();
-         bitField0_ = (bitField0_ & ~0x00000020);
-       } else {
-         coprocessorsBuilder_.clear();
-       }
-       reportStartTime_ = 0L;
-       bitField0_ = (bitField0_ & ~0x00000040);
-       reportEndTime_ = 0L;
-       bitField0_ = (bitField0_ & ~0x00000080);
-       infoServerPort_ = 0;
-       bitField0_ = (bitField0_ & ~0x00000100);
-       return this;
-     }
-
-     public Builder clone() {
-       return create().mergeFrom(buildPartial());
-     }
-
-     public com.google.protobuf.Descriptors.Descriptor
-         getDescriptorForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDescriptor();
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getDefaultInstanceForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad build() {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(result);
-       }
-       return result;
-     }
-
-     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad buildParsed()
-         throws com.google.protobuf.InvalidProtocolBufferException {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(
-           result).asInvalidProtocolBufferException();
-       }
-       return result;
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad buildPartial() {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad(this);
-       int from_bitField0_ = bitField0_;
-       int to_bitField0_ = 0;
-       if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-         to_bitField0_ |= 0x00000001;
-       }
-       result.numberOfRequests_ = numberOfRequests_;
-       if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-         to_bitField0_ |= 0x00000002;
-       }
-       result.totalNumberOfRequests_ = totalNumberOfRequests_;
-       if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-         to_bitField0_ |= 0x00000004;
-       }
-       result.usedHeapMB_ = usedHeapMB_;
-       if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-         to_bitField0_ |= 0x00000008;
-       }
-       result.maxHeapMB_ = maxHeapMB_;
-       if (regionLoadsBuilder_ == null) {
-         if (((bitField0_ & 0x00000010) == 0x00000010)) {
-           regionLoads_ = java.util.Collections.unmodifiableList(regionLoads_);
-           bitField0_ = (bitField0_ & ~0x00000010);
-         }
-         result.regionLoads_ = regionLoads_;
-       } else {
-         result.regionLoads_ = regionLoadsBuilder_.build();
-       }
-       if (coprocessorsBuilder_ == null) {
-         if (((bitField0_ & 0x00000020) == 0x00000020)) {
-           coprocessors_ = java.util.Collections.unmodifiableList(coprocessors_);
-           bitField0_ = (bitField0_ & ~0x00000020);
-         }
-         result.coprocessors_ = coprocessors_;
-       } else {
-         result.coprocessors_ = coprocessorsBuilder_.build();
-       }
-       if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
-         to_bitField0_ |= 0x00000010;
-       }
-       result.reportStartTime_ = reportStartTime_;
-       if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
-         to_bitField0_ |= 0x00000020;
-       }
-       result.reportEndTime_ = reportEndTime_;
-       if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
-         to_bitField0_ |= 0x00000040;
-       }
-       result.infoServerPort_ = infoServerPort_;
-       result.bitField0_ = to_bitField0_;
-       onBuilt();
-       return result;
-     }
-
-     public Builder mergeFrom(com.google.protobuf.Message other) {
-       if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad) {
-         return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad)other);
-       } else {
-         super.mergeFrom(other);
-         return this;
-       }
-     }
-
-     public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad other) {
-       if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) return this;
-       if (other.hasNumberOfRequests()) {
-         setNumberOfRequests(other.getNumberOfRequests());
-       }
-       if (other.hasTotalNumberOfRequests()) {
-         setTotalNumberOfRequests(other.getTotalNumberOfRequests());
-       }
-       if (other.hasUsedHeapMB()) {
-         setUsedHeapMB(other.getUsedHeapMB());
-       }
-       if (other.hasMaxHeapMB()) {
-         setMaxHeapMB(other.getMaxHeapMB());
-       }
-       if (regionLoadsBuilder_ == null) {
-         if (!other.regionLoads_.isEmpty()) {
-           if (regionLoads_.isEmpty()) {
-             regionLoads_ = other.regionLoads_;
-             bitField0_ = (bitField0_ & ~0x00000010);
-           } else {
-             ensureRegionLoadsIsMutable();
-             regionLoads_.addAll(other.regionLoads_);
-           }
-           onChanged();
-         }
-       } else {
-         if (!other.regionLoads_.isEmpty()) {
-           if (regionLoadsBuilder_.isEmpty()) {
-             regionLoadsBuilder_.dispose();
-             regionLoadsBuilder_ = null;
-             regionLoads_ = other.regionLoads_;
-             bitField0_ = (bitField0_ & ~0x00000010);
-             regionLoadsBuilder_ =
-               com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
-                  getRegionLoadsFieldBuilder() : null;
-           } else {
-             regionLoadsBuilder_.addAllMessages(other.regionLoads_);
-           }
-         }
-       }
-       if (coprocessorsBuilder_ == null) {
-         if (!other.coprocessors_.isEmpty()) {
-           if (coprocessors_.isEmpty()) {
-             coprocessors_ = other.coprocessors_;
-             bitField0_ = (bitField0_ & ~0x00000020);
-           } else {
-             ensureCoprocessorsIsMutable();
-             coprocessors_.addAll(other.coprocessors_);
-           }
-           onChanged();
-         }
-       } else {
-         if (!other.coprocessors_.isEmpty()) {
-           if (coprocessorsBuilder_.isEmpty()) {
-             coprocessorsBuilder_.dispose();
-             coprocessorsBuilder_ = null;
-             coprocessors_ = other.coprocessors_;
-             bitField0_ = (bitField0_ & ~0x00000020);
-             coprocessorsBuilder_ =
-               com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
-                  getCoprocessorsFieldBuilder() : null;
-           } else {
-             coprocessorsBuilder_.addAllMessages(other.coprocessors_);
-           }
-         }
-       }
-       if (other.hasReportStartTime()) {
-         setReportStartTime(other.getReportStartTime());
-       }
-       if (other.hasReportEndTime()) {
-         setReportEndTime(other.getReportEndTime());
-       }
-       if (other.hasInfoServerPort()) {
-         setInfoServerPort(other.getInfoServerPort());
-       }
-       this.mergeUnknownFields(other.getUnknownFields());
-       return this;
-     }
-
-     public final boolean isInitialized() {
-       for (int i = 0; i < getRegionLoadsCount(); i++) {
-         if (!getRegionLoads(i).isInitialized()) {
-
-           return false;
-         }
-       }
-       for (int i = 0; i < getCoprocessorsCount(); i++) {
-         if (!getCoprocessors(i).isInitialized()) {
-
-           return false;
-         }
-       }
-       return true;
-     }
-
-     public Builder mergeFrom(
-         com.google.protobuf.CodedInputStream input,
-         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-         throws java.io.IOException {
-       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-         com.google.protobuf.UnknownFieldSet.newBuilder(
-           this.getUnknownFields());
-       while (true) {
-         int tag = input.readTag();
-         switch (tag) {
-           case 0:
-             this.setUnknownFields(unknownFields.build());
-             onChanged();
-             return this;
-           default: {
-             if (!parseUnknownField(input, unknownFields,
-                                    extensionRegistry, tag)) {
-               this.setUnknownFields(unknownFields.build());
-               onChanged();
-               return this;
-             }
-             break;
-           }
-           case 8: {
-             bitField0_ |= 0x00000001;
-             numberOfRequests_ = input.readUInt32();
-             break;
-           }
-           case 16: {
-             bitField0_ |= 0x00000002;
-             totalNumberOfRequests_ = input.readUInt32();
-             break;
-           }
-           case 24: {
-             bitField0_ |= 0x00000004;
-             usedHeapMB_ = input.readUInt32();
-             break;
-           }
-           case 32: {
-             bitField0_ |= 0x00000008;
-             maxHeapMB_ = input.readUInt32();
-             break;
-           }
-           case 42: {
-             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.newBuilder();
-             input.readMessage(subBuilder, extensionRegistry);
-             addRegionLoads(subBuilder.buildPartial());
-             break;
-           }
-           case 50: {
-             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder();
-             input.readMessage(subBuilder, extensionRegistry);
-             addCoprocessors(subBuilder.buildPartial());
-             break;
-           }
-           case 56: {
-             bitField0_ |= 0x00000040;
-             reportStartTime_ = input.readUInt64();
-             break;
-           }
-           case 64: {
-             bitField0_ |= 0x00000080;
-             reportEndTime_ = input.readUInt64();
-             break;
-           }
-           case 72: {
-             bitField0_ |= 0x00000100;
-             infoServerPort_ = input.readUInt32();
-             break;
-           }
-         }
-       }
-     }
-
-     private int bitField0_;
-
-     // optional uint32 numberOfRequests = 1;
-     private int numberOfRequests_ ;
-     public boolean hasNumberOfRequests() {
-       return ((bitField0_ & 0x00000001) == 0x00000001);
-     }
-     public int getNumberOfRequests() {
-       return numberOfRequests_;
-     }
-     public Builder setNumberOfRequests(int value) {
-       bitField0_ |= 0x00000001;
-       numberOfRequests_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearNumberOfRequests() {
-       bitField0_ = (bitField0_ & ~0x00000001);
-       numberOfRequests_ = 0;
-       onChanged();
-       return this;
-     }
-
-     // optional uint32 totalNumberOfRequests = 2;
-     private int totalNumberOfRequests_ ;
-     public boolean hasTotalNumberOfRequests() {
-       return ((bitField0_ & 0x00000002) == 0x00000002);
-     }
-     public int getTotalNumberOfRequests() {
-       return totalNumberOfRequests_;
-     }
-     public Builder setTotalNumberOfRequests(int value) {
-       bitField0_ |= 0x00000002;
-       totalNumberOfRequests_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearTotalNumberOfRequests() {
-       bitField0_ = (bitField0_ & ~0x00000002);
-       totalNumberOfRequests_ = 0;
-       onChanged();
-       return this;
-     }
-
-     // optional uint32 usedHeapMB = 3;
-     private int usedHeapMB_ ;
-     public boolean hasUsedHeapMB() {
-       return ((bitField0_ & 0x00000004) == 0x00000004);
-     }
-     public int getUsedHeapMB() {
-       return usedHeapMB_;
-     }
-     public Builder setUsedHeapMB(int value) {
-       bitField0_ |= 0x00000004;
-       usedHeapMB_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearUsedHeapMB() {
-       bitField0_ = (bitField0_ & ~0x00000004);
-       usedHeapMB_ = 0;
-       onChanged();
-       return this;
-     }
-
-     // optional uint32 maxHeapMB = 4;
-     private int maxHeapMB_ ;
-     public boolean hasMaxHeapMB() {
-       return ((bitField0_ & 0x00000008) == 0x00000008);
-     }
-     public int getMaxHeapMB() {
-       return maxHeapMB_;
-     }
-     public Builder setMaxHeapMB(int value) {
-       bitField0_ |= 0x00000008;
-       maxHeapMB_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearMaxHeapMB() {
-       bitField0_ = (bitField0_ & ~0x00000008);
-       maxHeapMB_ = 0;
-       onChanged();
-       return this;
-     }
-
-     // repeated .RegionLoad regionLoads = 5;
-     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad> regionLoads_ =
-       java.util.Collections.emptyList();
-     private void ensureRegionLoadsIsMutable() {
-       if (!((bitField0_ & 0x00000010) == 0x00000010)) {
-         regionLoads_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad>(regionLoads_);
-         bitField0_ |= 0x00000010;
-       }
-     }
-
-     private com.google.protobuf.RepeatedFieldBuilder<
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder> regionLoadsBuilder_;
-
-     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad> getRegionLoadsList() {
-       if (regionLoadsBuilder_ == null) {
-         return java.util.Collections.unmodifiableList(regionLoads_);
-       } else {
-         return regionLoadsBuilder_.getMessageList();
-       }
-     }
-     public int getRegionLoadsCount() {
-       if (regionLoadsBuilder_ == null) {
-         return regionLoads_.size();
-       } else {
-         return regionLoadsBuilder_.getCount();
-       }
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad getRegionLoads(int index) {
-       if (regionLoadsBuilder_ == null) {
-         return regionLoads_.get(index);
-       } else {
-         return regionLoadsBuilder_.getMessage(index);
-       }
-     }
-     public Builder setRegionLoads(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) {
-       if (regionLoadsBuilder_ == null) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         ensureRegionLoadsIsMutable();
-         regionLoads_.set(index, value);
-         onChanged();
-       } else {
-         regionLoadsBuilder_.setMessage(index, value);
-       }
-       return this;
-     }
-     public Builder setRegionLoads(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) {
-       if (regionLoadsBuilder_ == null) {
-         ensureRegionLoadsIsMutable();
-         regionLoads_.set(index, builderForValue.build());
-         onChanged();
-       } else {
-         regionLoadsBuilder_.setMessage(index, builderForValue.build());
-       }
-       return this;
-     }
-     public Builder addRegionLoads(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) {
-       if (regionLoadsBuilder_ == null) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         ensureRegionLoadsIsMutable();
-         regionLoads_.add(value);
-         onChanged();
-       } else {
-         regionLoadsBuilder_.addMessage(value);
-       }
-       return this;
-     }
-     public Builder addRegionLoads(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad value) {
-       if (regionLoadsBuilder_ == null) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         ensureRegionLoadsIsMutable();
-         regionLoads_.add(index, value);
-         onChanged();
-       } else {
-         regionLoadsBuilder_.addMessage(index, value);
-       }
-       return this;
-     }
-     public Builder addRegionLoads(
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) {
-       if (regionLoadsBuilder_ == null) {
-         ensureRegionLoadsIsMutable();
-         regionLoads_.add(builderForValue.build());
-         onChanged();
-       } else {
-         regionLoadsBuilder_.addMessage(builderForValue.build());
-       }
-       return this;
-     }
-     public Builder addRegionLoads(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder builderForValue) {
-       if (regionLoadsBuilder_ == null) {
-         ensureRegionLoadsIsMutable();
-         regionLoads_.add(index, builderForValue.build());
-         onChanged();
-       } else {
-         regionLoadsBuilder_.addMessage(index, builderForValue.build());
-       }
-       return this;
-     }
-     public Builder addAllRegionLoads(
-         java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad> values) {
-       if (regionLoadsBuilder_ == null) {
-         ensureRegionLoadsIsMutable();
-         super.addAll(values, regionLoads_);
-         onChanged();
-       } else {
-         regionLoadsBuilder_.addAllMessages(values);
-       }
-       return this;
-     }
-     public Builder clearRegionLoads() {
-       if (regionLoadsBuilder_ == null) {
-         regionLoads_ = java.util.Collections.emptyList();
-         bitField0_ = (bitField0_ & ~0x00000010);
-         onChanged();
-       } else {
-         regionLoadsBuilder_.clear();
-       }
-       return this;
-     }
-     public Builder removeRegionLoads(int index) {
-       if (regionLoadsBuilder_ == null) {
-         ensureRegionLoadsIsMutable();
-         regionLoads_.remove(index);
-         onChanged();
-       } else {
-         regionLoadsBuilder_.remove(index);
-       }
-       return this;
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder getRegionLoadsBuilder(
-         int index) {
-       return getRegionLoadsFieldBuilder().getBuilder(index);
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder(
-         int index) {
-       if (regionLoadsBuilder_ == null) {
-         return regionLoads_.get(index);  } else {
-         return regionLoadsBuilder_.getMessageOrBuilder(index);
-       }
-     }
-     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder>
-          getRegionLoadsOrBuilderList() {
-       if (regionLoadsBuilder_ != null) {
-         return regionLoadsBuilder_.getMessageOrBuilderList();
-       } else {
-         return java.util.Collections.unmodifiableList(regionLoads_);
-       }
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder addRegionLoadsBuilder() {
-       return getRegionLoadsFieldBuilder().addBuilder(
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance());
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder addRegionLoadsBuilder(
-         int index) {
-       return getRegionLoadsFieldBuilder().addBuilder(
-         index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.getDefaultInstance());
-     }
-     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder>
-          getRegionLoadsBuilderList() {
-       return getRegionLoadsFieldBuilder().getBuilderList();
-     }
-     private com.google.protobuf.RepeatedFieldBuilder<
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder>
-         getRegionLoadsFieldBuilder() {
-       if (regionLoadsBuilder_ == null) {
-         regionLoadsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoadOrBuilder>(
-                 regionLoads_,
-                 ((bitField0_ & 0x00000010) == 0x00000010),
-                 getParentForChildren(),
-                 isClean());
-         regionLoads_ = null;
-       }
-       return regionLoadsBuilder_;
-     }
-
-     // repeated .Coprocessor coprocessors = 6;
-     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> coprocessors_ =
-       java.util.Collections.emptyList();
-     private void ensureCoprocessorsIsMutable() {
-       if (!((bitField0_ & 0x00000020) == 0x00000020)) {
-         coprocessors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>(coprocessors_);
-         bitField0_ |= 0x00000020;
-       }
-     }
-
-     private com.google.protobuf.RepeatedFieldBuilder<
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_;
-
-     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getCoprocessorsList() {
-       if (coprocessorsBuilder_ == null) {
-         return java.util.Collections.unmodifiableList(coprocessors_);
-       } else {
-         return coprocessorsBuilder_.getMessageList();
-       }
-     }
-     public int getCoprocessorsCount() {
-       if (coprocessorsBuilder_ == null) {
-         return coprocessors_.size();
-       } else {
-         return coprocessorsBuilder_.getCount();
-       }
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) {
-       if (coprocessorsBuilder_ == null) {
-         return coprocessors_.get(index);
-       } else {
-         return coprocessorsBuilder_.getMessage(index);
-       }
-     }
-     public Builder setCoprocessors(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
-       if (coprocessorsBuilder_ == null) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         ensureCoprocessorsIsMutable();
-         coprocessors_.set(index, value);
-         onChanged();
-       } else {
-         coprocessorsBuilder_.setMessage(index, value);
-       }
-       return this;
-     }
-     public Builder setCoprocessors(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
-       if (coprocessorsBuilder_ == null) {
-         ensureCoprocessorsIsMutable();
-         coprocessors_.set(index, builderForValue.build());
-         onChanged();
-       } else {
-         coprocessorsBuilder_.setMessage(index, builderForValue.build());
-       }
-       return this;
-     }
-     public Builder addCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
-       if (coprocessorsBuilder_ == null) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         ensureCoprocessorsIsMutable();
-         coprocessors_.add(value);
-         onChanged();
-       } else {
-         coprocessorsBuilder_.addMessage(value);
-       }
-       return this;
-     }
-     public Builder addCoprocessors(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
-       if (coprocessorsBuilder_ == null) {
-         if (value == null) {
-           throw new NullPointerException();
-         }
-         ensureCoprocessorsIsMutable();
-         coprocessors_.add(index, value);
-         onChanged();
-       } else {
-         coprocessorsBuilder_.addMessage(index, value);
-       }
-       return this;
-     }
-     public Builder addCoprocessors(
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
-       if (coprocessorsBuilder_ == null) {
-         ensureCoprocessorsIsMutable();
-         coprocessors_.add(builderForValue.build());
-         onChanged();
-       } else {
-         coprocessorsBuilder_.addMessage(builderForValue.build());
-       }
-       return this;
-     }
-     public Builder addCoprocessors(
-         int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
-       if (coprocessorsBuilder_ == null) {
-         ensureCoprocessorsIsMutable();
-         coprocessors_.add(index, builderForValue.build());
-         onChanged();
-       } else {
-         coprocessorsBuilder_.addMessage(index, builderForValue.build());
-       }
-       return this;
-     }
-     public Builder addAllCoprocessors(
-         java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> values) {
-       if (coprocessorsBuilder_ == null) {
-         ensureCoprocessorsIsMutable();
-         super.addAll(values, coprocessors_);
-         onChanged();
-       } else {
-         coprocessorsBuilder_.addAllMessages(values);
-       }
-       return this;
-     }
-     public Builder clearCoprocessors() {
-       if (coprocessorsBuilder_ == null) {
-         coprocessors_ = java.util.Collections.emptyList();
-         bitField0_ = (bitField0_ & ~0x00000020);
-         onChanged();
-       } else {
-         coprocessorsBuilder_.clear();
-       }
-       return this;
-     }
-     public Builder removeCoprocessors(int index) {
-       if (coprocessorsBuilder_ == null) {
-         ensureCoprocessorsIsMutable();
-         coprocessors_.remove(index);
-         onChanged();
-       } else {
-         coprocessorsBuilder_.remove(index);
-       }
-       return this;
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder(
-         int index) {
-       return getCoprocessorsFieldBuilder().getBuilder(index);
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder(
-         int index) {
-       if (coprocessorsBuilder_ == null) {
-         return coprocessors_.get(index);  } else {
-         return coprocessorsBuilder_.getMessageOrBuilder(index);
-       }
-     }
-     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
-          getCoprocessorsOrBuilderList() {
-       if (coprocessorsBuilder_ != null) {
-         return coprocessorsBuilder_.getMessageOrBuilderList();
-       } else {
-         return java.util.Collections.unmodifiableList(coprocessors_);
-       }
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() {
-       return getCoprocessorsFieldBuilder().addBuilder(
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance());
-     }
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder(
-         int index) {
-       return getCoprocessorsFieldBuilder().addBuilder(
-         index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance());
-     }
-     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder>
-          getCoprocessorsBuilderList() {
-       return getCoprocessorsFieldBuilder().getBuilderList();
-     }
-     private com.google.protobuf.RepeatedFieldBuilder<
-         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
-         getCoprocessorsFieldBuilder() {
-       if (coprocessorsBuilder_ == null) {
-         coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>(
-                 coprocessors_,
-                 ((bitField0_ & 0x00000020) == 0x00000020),
-                 getParentForChildren(),
-                 isClean());
-         coprocessors_ = null;
-       }
-       return coprocessorsBuilder_;
-     }
-
-     // optional uint64 reportStartTime = 7;
-     private long reportStartTime_ ;
-     public boolean hasReportStartTime() {
-       return ((bitField0_ & 0x00000040) == 0x00000040);
-     }
-     public long getReportStartTime() {
-       return reportStartTime_;
-     }
-     public Builder setReportStartTime(long value) {
-       bitField0_ |= 0x00000040;
-       reportStartTime_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearReportStartTime() {
-       bitField0_ = (bitField0_ & ~0x00000040);
-       reportStartTime_ = 0L;
-       onChanged();
-       return this;
-     }
-
-     // optional uint64 reportEndTime = 8;
-     private long reportEndTime_ ;
-     public boolean hasReportEndTime() {
-       return ((bitField0_ & 0x00000080) == 0x00000080);
-     }
-     public long getReportEndTime() {
-       return reportEndTime_;
-     }
-     public Builder setReportEndTime(long value) {
-       bitField0_ |= 0x00000080;
-       reportEndTime_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearReportEndTime() {
-       bitField0_ = (bitField0_ & ~0x00000080);
-       reportEndTime_ = 0L;
-       onChanged();
-       return this;
-     }
-
-     // optional uint32 infoServerPort = 9;
-     private int infoServerPort_ ;
-     public boolean hasInfoServerPort() {
-       return ((bitField0_ & 0x00000100) == 0x00000100);
-     }
-     public int getInfoServerPort() {
-       return infoServerPort_;
-     }
-     public Builder setInfoServerPort(int value) {
-       bitField0_ |= 0x00000100;
-       infoServerPort_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearInfoServerPort() {
-       bitField0_ = (bitField0_ & ~0x00000100);
-       infoServerPort_ = 0;
-       onChanged();
-       return this;
-     }
-
-     // @@protoc_insertion_point(builder_scope:ServerLoad)
-   }
-
-   static {
-     defaultInstance = new ServerLoad(true);
-     defaultInstance.initFields();
-   }
-
-   // @@protoc_insertion_point(class_scope:ServerLoad)
- }
-
- public interface TimeRangeOrBuilder
-     extends com.google.protobuf.MessageOrBuilder {
-
-   // optional uint64 from = 1;
-   boolean hasFrom();
-   long getFrom();
-
-   // optional uint64 to = 2;
-   boolean hasTo();
-   long getTo();
- }
- public static final class TimeRange extends
-     com.google.protobuf.GeneratedMessage
-     implements TimeRangeOrBuilder {
-   // Use TimeRange.newBuilder() to construct.
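The ServerLoad message above illustrates the presence-bit pattern this generated code uses throughout: each optional field owns one bit in bitField0_, hasX() tests it, and the Builder's setX()/clearX() flip it; note how buildPartial() remaps the Builder's bits (e.g. infoServerPort 0x00000100 -> 0x00000040) because repeated fields consume Builder bits but no message bits. A minimal usage sketch, not part of the patch itself; it assumes the generated classes are on the classpath and the literal values (42, 60030) are purely illustrative:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    HBaseProtos.ServerLoad load = HBaseProtos.ServerLoad.newBuilder()
        .setNumberOfRequests(42)    // sets presence bit 0x00000001
        .setInfoServerPort(60030)   // sets Builder bit 0x00000100
        .build();
    assert load.hasInfoServerPort();      // remapped bit is set on the message
    assert !load.hasReportStartTime();    // never set, so hasX() is false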
-   private TimeRange(Builder builder) {
-     super(builder);
-   }
-   private TimeRange(boolean noInit) {}
-
-   private static final TimeRange defaultInstance;
-   public static TimeRange getDefaultInstance() {
-     return defaultInstance;
-   }
-
-   public TimeRange getDefaultInstanceForType() {
-     return defaultInstance;
-   }
-
-   public static final com.google.protobuf.Descriptors.Descriptor
-       getDescriptor() {
-     return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor;
-   }
-
-   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-       internalGetFieldAccessorTable() {
-     return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable;
-   }
-
-   private int bitField0_;
-   // optional uint64 from = 1;
-   public static final int FROM_FIELD_NUMBER = 1;
-   private long from_;
-   public boolean hasFrom() {
-     return ((bitField0_ & 0x00000001) == 0x00000001);
-   }
-   public long getFrom() {
-     return from_;
-   }
-
-   // optional uint64 to = 2;
-   public static final int TO_FIELD_NUMBER = 2;
-   private long to_;
-   public boolean hasTo() {
-     return ((bitField0_ & 0x00000002) == 0x00000002);
-   }
-   public long getTo() {
-     return to_;
-   }
-
-   private void initFields() {
-     from_ = 0L;
-     to_ = 0L;
-   }
-   private byte memoizedIsInitialized = -1;
-   public final boolean isInitialized() {
-     byte isInitialized = memoizedIsInitialized;
-     if (isInitialized != -1) return isInitialized == 1;
-
-     memoizedIsInitialized = 1;
-     return true;
-   }
-
-   public void writeTo(com.google.protobuf.CodedOutputStream output)
-       throws java.io.IOException {
-     getSerializedSize();
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       output.writeUInt64(1, from_);
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       output.writeUInt64(2, to_);
-     }
-     getUnknownFields().writeTo(output);
-   }
-
-   private int memoizedSerializedSize = -1;
-   public int getSerializedSize() {
-     int size = memoizedSerializedSize;
-     if (size != -1) return size;
-
-     size = 0;
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt64Size(1, from_);
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt64Size(2, to_);
-     }
-     size += getUnknownFields().getSerializedSize();
-     memoizedSerializedSize = size;
-     return size;
-   }
-
-   private static final long serialVersionUID = 0L;
-   @java.lang.Override
-   protected java.lang.Object writeReplace()
-       throws java.io.ObjectStreamException {
-     return super.writeReplace();
-   }
-
-   @java.lang.Override
-   public boolean equals(final java.lang.Object obj) {
-     if (obj == this) {
-       return true;
-     }
-     if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)) {
-       return super.equals(obj);
-     }
-     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) obj;
-
-     boolean result = true;
-     result = result && (hasFrom() == other.hasFrom());
-     if (hasFrom()) {
-       result = result && (getFrom()
-           == other.getFrom());
-     }
-     result = result && (hasTo() == other.hasTo());
-     if (hasTo()) {
-       result = result && (getTo()
-           == other.getTo());
-     }
-     result = result &&
-         getUnknownFields().equals(other.getUnknownFields());
-     return result;
-   }
-
-   @java.lang.Override
-   public int hashCode() {
-     int hash = 41;
-     hash = (19 * hash) + getDescriptorForType().hashCode();
-     if (hasFrom()) {
-       hash = (37 * hash) + FROM_FIELD_NUMBER;
-       hash = (53 * hash) + hashLong(getFrom());
-     }
-     if (hasTo()) {
-       hash = (37 * hash) + TO_FIELD_NUMBER;
-       hash = (53 * hash) + hashLong(getTo());
-     }
-     hash = (29 * hash) + getUnknownFields().hashCode();
-     return hash;
-   }
-
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(
-       com.google.protobuf.ByteString data)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(
-       com.google.protobuf.ByteString data,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(
-       byte[] data,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(
-       java.io.InputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input)
-       throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(
-       java.io.InputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(
-       com.google.protobuf.CodedInputStream input)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(
-       com.google.protobuf.CodedInputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
-   }
-
-   public static Builder newBuilder() { return Builder.create(); }
-   public Builder newBuilderForType() { return newBuilder(); }
-   public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype) {
-     return newBuilder().mergeFrom(prototype);
-   }
-   public Builder toBuilder() { return newBuilder(this); }
-
-   @java.lang.Override
-   protected Builder newBuilderForType(
-       com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-     Builder builder = new Builder(parent);
-     return builder;
-   }
-   public static final class Builder extends
-       com.google.protobuf.GeneratedMessage.Builder<Builder>
-      implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder {
-     public static final com.google.protobuf.Descriptors.Descriptor
-         getDescriptor() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor;
-     }
-
-     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-         internalGetFieldAccessorTable() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable;
-     }
-
-     // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder()
-     private Builder() {
-       maybeForceBuilderInitialization();
-     }
-
-     private Builder(BuilderParent parent) {
-       super(parent);
-       maybeForceBuilderInitialization();
-     }
-     private void maybeForceBuilderInitialization() {
-       if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-       }
-     }
-     private static Builder create() {
-       return new Builder();
-     }
-
-     public Builder clear() {
-       super.clear();
-       from_ = 0L;
-       bitField0_ = (bitField0_ & ~0x00000001);
-       to_ = 0L;
-       bitField0_ = (bitField0_ & ~0x00000002);
-       return this;
-     }
-
-     public Builder clone() {
-       return create().mergeFrom(buildPartial());
-     }
-
-     public com.google.protobuf.Descriptors.Descriptor
-         getDescriptorForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDescriptor();
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange build() {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(result);
-       }
-       return result;
-     }
-
-     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildParsed()
-         throws com.google.protobuf.InvalidProtocolBufferException {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(
-           result).asInvalidProtocolBufferException();
-       }
-       return result;
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildPartial() {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange(this);
-       int from_bitField0_ = bitField0_;
-       int to_bitField0_ = 0;
-       if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-         to_bitField0_ |= 0x00000001;
-       }
-       result.from_ = from_;
-       if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-         to_bitField0_ |= 0x00000002;
-       }
-       result.to_ = to_;
-       result.bitField0_ = to_bitField0_;
-       onBuilt();
-       return result;
-     }
-
-     public Builder mergeFrom(com.google.protobuf.Message other) {
-       if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) {
-         return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)other);
-       } else {
-         super.mergeFrom(other);
-         return this;
-       }
-     }
-
-     public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other) {
-       if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this;
-       if (other.hasFrom()) {
-         setFrom(other.getFrom());
-       }
-       if (other.hasTo()) {
-         setTo(other.getTo());
-       }
-       this.mergeUnknownFields(other.getUnknownFields());
-       return this;
-     }
-
-     public final boolean isInitialized() {
-       return true;
-     }
-
-     public Builder mergeFrom(
-         com.google.protobuf.CodedInputStream input,
-         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-         throws java.io.IOException {
-       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-         com.google.protobuf.UnknownFieldSet.newBuilder(
-           this.getUnknownFields());
-       while (true) {
-         int tag = input.readTag();
-         switch (tag) {
-           case 0:
-             this.setUnknownFields(unknownFields.build());
-             onChanged();
-             return this;
-           default: {
-             if (!parseUnknownField(input, unknownFields,
-                                    extensionRegistry, tag)) {
-               this.setUnknownFields(unknownFields.build());
-               onChanged();
-               return this;
-             }
-             break;
-           }
-           case 8: {
-             bitField0_ |= 0x00000001;
-             from_ = input.readUInt64();
-             break;
-           }
-           case 16: {
-             bitField0_ |= 0x00000002;
-             to_ = input.readUInt64();
-             break;
-           }
-         }
-       }
-     }
-
-     private int bitField0_;
-
-     // optional uint64 from = 1;
-     private long from_ ;
-     public boolean hasFrom() {
-       return ((bitField0_ & 0x00000001) == 0x00000001);
-     }
-     public long getFrom() {
-       return from_;
-     }
-     public Builder setFrom(long value) {
-       bitField0_ |= 0x00000001;
-       from_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearFrom() {
-       bitField0_ = (bitField0_ & ~0x00000001);
-       from_ = 0L;
-       onChanged();
-       return this;
-     }
-
-     // optional uint64 to = 2;
-     private long to_ ;
-     public boolean hasTo() {
-       return ((bitField0_ & 0x00000002) == 0x00000002);
-     }
-     public long getTo() {
-       return to_;
-     }
-     public Builder setTo(long value) {
-       bitField0_ |= 0x00000002;
-       to_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearTo() {
-       bitField0_ = (bitField0_ & ~0x00000002);
-       to_ = 0L;
-       onChanged();
-       return this;
-     }
-
-     // @@protoc_insertion_point(builder_scope:TimeRange)
-   }
-
-   static {
-     defaultInstance = new TimeRange(true);
-     defaultInstance.initFields();
-   }
-
-   // @@protoc_insertion_point(class_scope:TimeRange)
- }
-
- public interface FilterOrBuilder
-     extends com.google.protobuf.MessageOrBuilder {
-
-   // required string name = 1;
-   boolean hasName();
-   String getName();
-
-   // optional bytes serializedFilter = 2;
-   boolean hasSerializedFilter();
-   com.google.protobuf.ByteString getSerializedFilter();
- }
- public static final class Filter extends
-     com.google.protobuf.GeneratedMessage
-     implements FilterOrBuilder {
-   // Use Filter.newBuilder() to construct.
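TimeRange, now complete above, is the simplest message in this file: two optional uint64 fields and no required ones, so its Builder's isInitialized() always returns true and build() can never throw. A round-trip sketch, not part of the patch; it assumes the generated classes are importable, and the literal values are illustrative (toByteArray() is the standard com.google.protobuf.Message serialization method):

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    HBaseProtos.TimeRange range = HBaseProtos.TimeRange.newBuilder()
        .setFrom(0L)
        .setTo(1234567890L)
        .build();                                        // cannot throw: no required fields
    byte[] wire = range.toByteArray();                   // serialize to the protobuf wire format
    HBaseProtos.TimeRange copy = HBaseProtos.TimeRange.parseFrom(wire);
    assert copy.getFrom() == 0L && copy.getTo() == 1234567890L;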
-   private Filter(Builder builder) {
-     super(builder);
-   }
-   private Filter(boolean noInit) {}
-
-   private static final Filter defaultInstance;
-   public static Filter getDefaultInstance() {
-     return defaultInstance;
-   }
-
-   public Filter getDefaultInstanceForType() {
-     return defaultInstance;
-   }
-
-   public static final com.google.protobuf.Descriptors.Descriptor
-       getDescriptor() {
-     return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor;
-   }
-
-   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-       internalGetFieldAccessorTable() {
-     return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable;
-   }
-
-   private int bitField0_;
-   // required string name = 1;
-   public static final int NAME_FIELD_NUMBER = 1;
-   private java.lang.Object name_;
-   public boolean hasName() {
-     return ((bitField0_ & 0x00000001) == 0x00000001);
-   }
-   public String getName() {
-     java.lang.Object ref = name_;
-     if (ref instanceof String) {
-       return (String) ref;
-     } else {
-       com.google.protobuf.ByteString bs =
-           (com.google.protobuf.ByteString) ref;
-       String s = bs.toStringUtf8();
-       if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-         name_ = s;
-       }
-       return s;
-     }
-   }
-   private com.google.protobuf.ByteString getNameBytes() {
-     java.lang.Object ref = name_;
-     if (ref instanceof String) {
-       com.google.protobuf.ByteString b =
-           com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-       name_ = b;
-       return b;
-     } else {
-       return (com.google.protobuf.ByteString) ref;
-     }
-   }
-
-   // optional bytes serializedFilter = 2;
-   public static final int SERIALIZEDFILTER_FIELD_NUMBER = 2;
-   private com.google.protobuf.ByteString serializedFilter_;
-   public boolean hasSerializedFilter() {
-     return ((bitField0_ & 0x00000002) == 0x00000002);
-   }
-   public com.google.protobuf.ByteString getSerializedFilter() {
-     return serializedFilter_;
-   }
-
-   private void initFields() {
-     name_ = "";
-     serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
-   }
-   private byte memoizedIsInitialized = -1;
-   public final boolean isInitialized() {
-     byte isInitialized = memoizedIsInitialized;
-     if (isInitialized != -1) return isInitialized == 1;
-
-     if (!hasName()) {
-       memoizedIsInitialized = 0;
-       return false;
-     }
-     memoizedIsInitialized = 1;
-     return true;
-   }
-
-   public void writeTo(com.google.protobuf.CodedOutputStream output)
-       throws java.io.IOException {
-     getSerializedSize();
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       output.writeBytes(1, getNameBytes());
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       output.writeBytes(2, serializedFilter_);
-     }
-     getUnknownFields().writeTo(output);
-   }
-
-   private int memoizedSerializedSize = -1;
-   public int getSerializedSize() {
-     int size = memoizedSerializedSize;
-     if (size != -1) return size;
-
-     size = 0;
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeBytesSize(1, getNameBytes());
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeBytesSize(2, serializedFilter_);
-     }
-     size += getUnknownFields().getSerializedSize();
-     memoizedSerializedSize = size;
-     return size;
-   }
-
-   private static final long serialVersionUID = 0L;
-   @java.lang.Override
-   protected java.lang.Object writeReplace()
-       throws java.io.ObjectStreamException {
-     return super.writeReplace();
-   }
-
-   @java.lang.Override
-   public boolean equals(final java.lang.Object obj) {
-     if (obj == this) {
-       return true;
-     }
-     if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter)) {
-       return super.equals(obj);
-     }
-     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) obj;
-
-     boolean result = true;
-     result = result && (hasName() == other.hasName());
-     if (hasName()) {
-       result = result && getName()
-           .equals(other.getName());
-     }
-     result = result && (hasSerializedFilter() == other.hasSerializedFilter());
-     if (hasSerializedFilter()) {
-       result = result && getSerializedFilter()
-           .equals(other.getSerializedFilter());
-     }
-     result = result &&
-         getUnknownFields().equals(other.getUnknownFields());
-     return result;
-   }
-
-   @java.lang.Override
-   public int hashCode() {
-     int hash = 41;
-     hash = (19 * hash) + getDescriptorForType().hashCode();
-     if (hasName()) {
-       hash = (37 * hash) + NAME_FIELD_NUMBER;
-       hash = (53 * hash) + getName().hashCode();
-     }
-     if (hasSerializedFilter()) {
-       hash = (37 * hash) + SERIALIZEDFILTER_FIELD_NUMBER;
-       hash = (53 * hash) + getSerializedFilter().hashCode();
-     }
-     hash = (29 * hash) + getUnknownFields().hashCode();
-     return hash;
-   }
-
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(
-       com.google.protobuf.ByteString data)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(
-       com.google.protobuf.ByteString data,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(byte[] data)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(
-       byte[] data,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws com.google.protobuf.InvalidProtocolBufferException {
-     return newBuilder().mergeFrom(data, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(java.io.InputStream input)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(
-       java.io.InputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseDelimitedFrom(java.io.InputStream input)
-       throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseDelimitedFrom(
-       java.io.InputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     Builder builder = newBuilder();
-     if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-       return builder.buildParsed();
-     } else {
-       return null;
-     }
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(
-       com.google.protobuf.CodedInputStream input)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input).buildParsed();
-   }
-   public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter parseFrom(
-       com.google.protobuf.CodedInputStream input,
-       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-       throws java.io.IOException {
-     return newBuilder().mergeFrom(input, extensionRegistry)
-              .buildParsed();
-   }
-
-   public static Builder newBuilder() { return Builder.create(); }
-   public Builder newBuilderForType() { return newBuilder(); }
-   public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter prototype) {
-     return newBuilder().mergeFrom(prototype);
-   }
-   public Builder toBuilder() { return newBuilder(this); }
-
-   @java.lang.Override
-   protected Builder newBuilderForType(
-       com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-     Builder builder = new Builder(parent);
-     return builder;
-   }
-   public static final class Builder extends
-       com.google.protobuf.GeneratedMessage.Builder<Builder>
-      implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FilterOrBuilder {
-     public static final com.google.protobuf.Descriptors.Descriptor
-         getDescriptor() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_descriptor;
-     }
-
-     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-         internalGetFieldAccessorTable() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Filter_fieldAccessorTable;
-     }
-
-     // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.newBuilder()
-     private Builder() {
-       maybeForceBuilderInitialization();
-     }
-
-     private Builder(BuilderParent parent) {
-       super(parent);
-       maybeForceBuilderInitialization();
-     }
-     private void maybeForceBuilderInitialization() {
-       if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-       }
-     }
-     private static Builder create() {
-       return new Builder();
-     }
-
-     public Builder clear() {
-       super.clear();
-       name_ = "";
-       bitField0_ = (bitField0_ & ~0x00000001);
-       serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
-       bitField0_ = (bitField0_ & ~0x00000002);
-       return this;
-     }
-
-     public Builder clone() {
-       return create().mergeFrom(buildPartial());
-     }
-
-     public com.google.protobuf.Descriptors.Descriptor
-         getDescriptorForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDescriptor();
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter getDefaultInstanceForType() {
-       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance();
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter build() {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(result);
-       }
-       return result;
-     }
-
-     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter buildParsed()
-         throws com.google.protobuf.InvalidProtocolBufferException {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = buildPartial();
-       if (!result.isInitialized()) {
-         throw newUninitializedMessageException(
-           result).asInvalidProtocolBufferException();
-       }
-       return result;
-     }
-
-     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter buildPartial() {
-       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter(this);
-       int from_bitField0_ = bitField0_;
-       int to_bitField0_ = 0;
-       if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-         to_bitField0_ |= 0x00000001;
-       }
-       result.name_ = name_;
-       if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-         to_bitField0_ |= 0x00000002;
-       }
-       result.serializedFilter_ = serializedFilter_;
-       result.bitField0_ = to_bitField0_;
-       onBuilt();
-       return result;
-     }
-
-     public Builder mergeFrom(com.google.protobuf.Message other) {
-       if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter) {
-         return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter)other);
-       } else {
-         super.mergeFrom(other);
-         return this;
-       }
-     }
-
-     public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter other) {
-       if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.getDefaultInstance()) return this;
-       if (other.hasName()) {
-         setName(other.getName());
-       }
-       if (other.hasSerializedFilter()) {
-         setSerializedFilter(other.getSerializedFilter());
-       }
-       this.mergeUnknownFields(other.getUnknownFields());
-       return this;
-     }
-
-     public final boolean isInitialized() {
-       if (!hasName()) {
-
-         return false;
-       }
-       return true;
-     }
-
-     public Builder mergeFrom(
-         com.google.protobuf.CodedInputStream input,
-         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-         throws java.io.IOException {
-       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-         com.google.protobuf.UnknownFieldSet.newBuilder(
-           this.getUnknownFields());
-       while (true) {
-         int tag = input.readTag();
-         switch (tag) {
-           case 0:
-             this.setUnknownFields(unknownFields.build());
-             onChanged();
-             return this;
-           default: {
-             if (!parseUnknownField(input, unknownFields,
-                                    extensionRegistry, tag)) {
-               this.setUnknownFields(unknownFields.build());
-               onChanged();
-               return this;
-             }
-             break;
-           }
-           case 10: {
-             bitField0_ |= 0x00000001;
-             name_ = input.readBytes();
-             break;
-           }
-           case 18: {
-             bitField0_ |= 0x00000002;
-             serializedFilter_ = input.readBytes();
-             break;
-           }
-         }
-       }
-     }
-
-     private int bitField0_;
-
-     // required string name = 1;
-     private java.lang.Object name_ = "";
-     public boolean hasName() {
-       return ((bitField0_ & 0x00000001) == 0x00000001);
-     }
-     public String getName() {
-       java.lang.Object ref = name_;
-       if (!(ref instanceof String)) {
-         String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-         name_ = s;
-         return s;
-       } else {
-         return (String) ref;
-       }
-     }
-     public Builder setName(String value) {
-       if (value == null) {
-         throw new NullPointerException();
-       }
-       bitField0_ |= 0x00000001;
-       name_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearName() {
-       bitField0_ = (bitField0_ & ~0x00000001);
-       name_ = getDefaultInstance().getName();
-       onChanged();
-       return this;
-     }
-     void setName(com.google.protobuf.ByteString value) {
-       bitField0_ |= 0x00000001;
-       name_ = value;
-       onChanged();
-     }
-
-     // optional bytes serializedFilter = 2;
-     private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
-     public boolean hasSerializedFilter() {
-       return ((bitField0_ & 0x00000002) == 0x00000002);
-     }
-     public com.google.protobuf.ByteString getSerializedFilter() {
-       return serializedFilter_;
-     }
-     public Builder setSerializedFilter(com.google.protobuf.ByteString value) {
-       if (value == null) {
-         throw new NullPointerException();
-       }
-       bitField0_ |= 0x00000002;
-       serializedFilter_ = value;
-       onChanged();
-       return this;
-     }
-     public Builder clearSerializedFilter() {
-       bitField0_ = (bitField0_ & ~0x00000002);
-       serializedFilter_ = getDefaultInstance().getSerializedFilter();
-       onChanged();
-       return this;
-     }
-
-     // @@protoc_insertion_point(builder_scope:Filter)
-   }
-
-   static {
-     defaultInstance = new Filter(true);
-     defaultInstance.initFields();
-   }
-
-   // @@protoc_insertion_point(class_scope:Filter)
- }
-
- public interface KeyValueOrBuilder
-     extends com.google.protobuf.MessageOrBuilder {
-
-   // required bytes row = 1;
-   boolean hasRow();
-   com.google.protobuf.ByteString getRow();
-
-   // required bytes family = 2;
-   boolean hasFamily();
-   com.google.protobuf.ByteString getFamily();
-
-   // required bytes qualifier = 3;
-   boolean hasQualifier();
-   com.google.protobuf.ByteString getQualifier();
-
-   // optional uint64 timestamp = 4;
-   boolean hasTimestamp();
-   long getTimestamp();
-
-   // optional .KeyType keyType = 5;
-   boolean hasKeyType();
-   org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType();
-
-   // optional bytes value = 6;
-   boolean hasValue();
-   com.google.protobuf.ByteString getValue();
- }
- public static final class KeyValue extends
-     com.google.protobuf.GeneratedMessage
-     implements KeyValueOrBuilder {
-   // Use KeyValue.newBuilder() to construct.
-   private KeyValue(Builder builder) {
-     super(builder);
-   }
-   private KeyValue(boolean noInit) {}
-
-   private static final KeyValue defaultInstance;
-   public static KeyValue getDefaultInstance() {
-     return defaultInstance;
-   }
-
-   public KeyValue getDefaultInstanceForType() {
-     return defaultInstance;
-   }
-
-   public static final com.google.protobuf.Descriptors.Descriptor
-       getDescriptor() {
-     return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor;
-   }
-
-   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-       internalGetFieldAccessorTable() {
-     return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable;
-   }
-
-   private int bitField0_;
-   // required bytes row = 1;
-   public static final int ROW_FIELD_NUMBER = 1;
-   private com.google.protobuf.ByteString row_;
-   public boolean hasRow() {
-     return ((bitField0_ & 0x00000001) == 0x00000001);
-   }
-   public com.google.protobuf.ByteString getRow() {
-     return row_;
-   }
-
-   // required bytes family = 2;
-   public static final int FAMILY_FIELD_NUMBER = 2;
-   private com.google.protobuf.ByteString family_;
-   public boolean hasFamily() {
-     return ((bitField0_ & 0x00000002) == 0x00000002);
-   }
-   public com.google.protobuf.ByteString getFamily() {
-     return family_;
-   }
-
-   // required bytes qualifier = 3;
-   public static final int QUALIFIER_FIELD_NUMBER = 3;
-   private com.google.protobuf.ByteString qualifier_;
-   public boolean hasQualifier() {
-     return ((bitField0_ & 0x00000004) == 0x00000004);
-   }
-   public com.google.protobuf.ByteString getQualifier() {
-     return qualifier_;
-   }
-
-   // optional uint64 timestamp = 4;
-   public static final int TIMESTAMP_FIELD_NUMBER = 4;
-   private long timestamp_;
-   public boolean hasTimestamp() {
-     return ((bitField0_ & 0x00000008) == 0x00000008);
-   }
-   public long getTimestamp() {
-     return timestamp_;
-   }
-
-   // optional .KeyType keyType = 5;
-   public static final int KEYTYPE_FIELD_NUMBER = 5;
-   private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType keyType_;
-   public boolean hasKeyType() {
-     return ((bitField0_ & 0x00000010) == 0x00000010);
-   }
-   public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType() {
-     return keyType_;
-   }
-
-   // optional bytes value = 6;
-   public static final int VALUE_FIELD_NUMBER = 6;
-   private com.google.protobuf.ByteString value_;
-   public boolean hasValue() {
-     return ((bitField0_ & 0x00000020) == 0x00000020);
-   }
-   public com.google.protobuf.ByteString getValue() {
-     return value_;
-   }
-
-   private void initFields() {
-     row_ = com.google.protobuf.ByteString.EMPTY;
-     family_ = com.google.protobuf.ByteString.EMPTY;
-     qualifier_ = com.google.protobuf.ByteString.EMPTY;
-     timestamp_ = 0L;
-     keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM;
-     value_ = com.google.protobuf.ByteString.EMPTY;
-   }
-   private byte memoizedIsInitialized = -1;
-   public final boolean isInitialized() {
-     byte isInitialized = memoizedIsInitialized;
-     if (isInitialized != -1) return isInitialized == 1;
-
-     if (!hasRow()) {
-       memoizedIsInitialized = 0;
-       return false;
-     }
-     if (!hasFamily()) {
-       memoizedIsInitialized = 0;
-       return false;
-     }
-     if (!hasQualifier()) {
-       memoizedIsInitialized = 0;
-       return false;
-     }
-     memoizedIsInitialized = 1;
-     return true;
-   }
-
-   public void writeTo(com.google.protobuf.CodedOutputStream output)
-       throws java.io.IOException {
-     getSerializedSize();
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       output.writeBytes(1, row_);
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       output.writeBytes(2, family_);
-     }
-     if (((bitField0_ & 0x00000004) == 0x00000004)) {
-       output.writeBytes(3, qualifier_);
-     }
-     if (((bitField0_ & 0x00000008) == 0x00000008)) {
-       output.writeUInt64(4, timestamp_);
-     }
-     if (((bitField0_ & 0x00000010) == 0x00000010)) {
-       output.writeEnum(5, keyType_.getNumber());
-     }
-     if (((bitField0_ & 0x00000020) == 0x00000020)) {
-       output.writeBytes(6, value_);
-     }
-     getUnknownFields().writeTo(output);
-   }
-
-   private int memoizedSerializedSize = -1;
-   public int getSerializedSize() {
-     int size = memoizedSerializedSize;
-     if (size != -1) return size;
-
-     size = 0;
-     if (((bitField0_ & 0x00000001) == 0x00000001)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeBytesSize(1, row_);
-     }
-     if (((bitField0_ & 0x00000002) == 0x00000002)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeBytesSize(2, family_);
-     }
-     if (((bitField0_ & 0x00000004) == 0x00000004)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeBytesSize(3, qualifier_);
-     }
-     if (((bitField0_ & 0x00000008) == 0x00000008)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeUInt64Size(4, timestamp_);
-     }
-     if (((bitField0_ & 0x00000010) == 0x00000010)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeEnumSize(5, keyType_.getNumber());
-     }
-     if (((bitField0_ & 0x00000020) == 0x00000020)) {
-       size += com.google.protobuf.CodedOutputStream
-         .computeBytesSize(6, value_);
-     }
-     size += getUnknownFields().getSerializedSize();
-     memoizedSerializedSize = size;
-     return size;
-   }
-
-   private static final long serialVersionUID = 0L;
-   @java.lang.Override
-   protected java.lang.Object writeReplace()
-       throws java.io.ObjectStreamException {
-     return super.writeReplace();
-   }
-
-   @java.lang.Override
-   public boolean equals(final java.lang.Object obj) {
-     if (obj == this) {
-       return true;
-     }
-     if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)) {
-       return super.equals(obj);
-     }
-     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) obj;
-
-     boolean result = true;
-     result = result && (hasRow() == other.hasRow());
- if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && (hasKeyType() == other.hasKeyType()); - if (hasKeyType()) { - result = result && - (getKeyType() == other.getKeyType()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - if (hasKeyType()) { - hash = (37 * hash) + KEYTYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getKeyType()); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue 
parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; - bitField0_ = (bitField0_ & ~0x00000010); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.qualifier_ = qualifier_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.timestamp_ = timestamp_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.keyType_ = keyType_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - if (other.hasKeyType()) { - setKeyType(other.getKeyType()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasFamily()) { - - return false; - } - if (!hasQualifier()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while 
(true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - family_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - qualifier_ = input.readBytes(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - timestamp_ = input.readUInt64(); - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - keyType_ = value; - } - break; - } - case 50: { - bitField0_ |= 0x00000020; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required bytes family = 2; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000002); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // required bytes qualifier = 3; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - qualifier_ = value; - onChanged(); - return this; - } - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000004); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // optional uint64 timestamp = 4; - private long timestamp_ ; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000008; - timestamp_ = value; - onChanged(); - return this; - } - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000008); - timestamp_ = 0L; - 
onChanged(); - return this; - } - - // optional .KeyType keyType = 5; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; - public boolean hasKeyType() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType() { - return keyType_; - } - public Builder setKeyType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000010; - keyType_ = value; - onChanged(); - return this; - } - public Builder clearKeyType() { - bitField0_ = (bitField0_ & ~0x00000010); - keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; - onChanged(); - return this; - } - - // optional bytes value = 6; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000020; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000020); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:KeyValue) - } - - static { - defaultInstance = new KeyValue(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:KeyValue) - } - - public interface ServerNameOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string hostName = 1; - boolean hasHostName(); - String getHostName(); - - // optional uint32 port = 2; - boolean hasPort(); - int getPort(); - - // optional uint64 startCode = 3; - boolean hasStartCode(); - long getStartCode(); - } - public static final class ServerName extends - com.google.protobuf.GeneratedMessage - implements ServerNameOrBuilder { - // Use ServerName.newBuilder() to construct. 
- private ServerName(Builder builder) { - super(builder); - } - private ServerName(boolean noInit) {} - - private static final ServerName defaultInstance; - public static ServerName getDefaultInstance() { - return defaultInstance; - } - - public ServerName getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; - } - - private int bitField0_; - // required string hostName = 1; - public static final int HOSTNAME_FIELD_NUMBER = 1; - private java.lang.Object hostName_; - public boolean hasHostName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getHostName() { - java.lang.Object ref = hostName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - hostName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getHostNameBytes() { - java.lang.Object ref = hostName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - hostName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional uint32 port = 2; - public static final int PORT_FIELD_NUMBER = 2; - private int port_; - public boolean hasPort() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getPort() { - return port_; - } - - // optional uint64 startCode = 3; - public static final int STARTCODE_FIELD_NUMBER = 3; - private long startCode_; - public boolean hasStartCode() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getStartCode() { - return startCode_; - } - - private void initFields() { - hostName_ = ""; - port_ = 0; - startCode_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasHostName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getHostNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, port_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, startCode_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getHostNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, port_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, startCode_); - } 
- size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) obj; - - boolean result = true; - result = result && (hasHostName() == other.hasHostName()); - if (hasHostName()) { - result = result && getHostName() - .equals(other.getHostName()); - } - result = result && (hasPort() == other.hasPort()); - if (hasPort()) { - result = result && (getPort() - == other.getPort()); - } - result = result && (hasStartCode() == other.hasStartCode()); - if (hasStartCode()) { - result = result && (getStartCode() - == other.getStartCode()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasHostName()) { - hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; - hash = (53 * hash) + getHostName().hashCode(); - } - if (hasPort()) { - hash = (37 * hash) + PORT_FIELD_NUMBER; - hash = (53 * hash) + getPort(); - } - if (hasStartCode()) { - hash = (37 * hash) + STARTCODE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStartCode()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) 
- throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - hostName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - port_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - startCode_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName build() { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.hostName_ = hostName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.port_ = port_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.startCode_ = startCode_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; - if (other.hasHostName()) { - setHostName(other.getHostName()); - } - if (other.hasPort()) { - setPort(other.getPort()); - } - if (other.hasStartCode()) { - setStartCode(other.getStartCode()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasHostName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - hostName_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - port_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - startCode_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required string hostName = 1; - private java.lang.Object hostName_ = ""; - public boolean hasHostName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getHostName() { - java.lang.Object ref = hostName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - hostName_ = s; - return 
s; - } else { - return (String) ref; - } - } - public Builder setHostName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - hostName_ = value; - onChanged(); - return this; - } - public Builder clearHostName() { - bitField0_ = (bitField0_ & ~0x00000001); - hostName_ = getDefaultInstance().getHostName(); - onChanged(); - return this; - } - void setHostName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - hostName_ = value; - onChanged(); - } - - // optional uint32 port = 2; - private int port_ ; - public boolean hasPort() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getPort() { - return port_; - } - public Builder setPort(int value) { - bitField0_ |= 0x00000002; - port_ = value; - onChanged(); - return this; - } - public Builder clearPort() { - bitField0_ = (bitField0_ & ~0x00000002); - port_ = 0; - onChanged(); - return this; - } - - // optional uint64 startCode = 3; - private long startCode_ ; - public boolean hasStartCode() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getStartCode() { - return startCode_; - } - public Builder setStartCode(long value) { - bitField0_ |= 0x00000004; - startCode_ = value; - onChanged(); - return this; - } - public Builder clearStartCode() { - bitField0_ = (bitField0_ & ~0x00000004); - startCode_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ServerName) - } - - static { - defaultInstance = new ServerName(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ServerName) - } - - public interface CoprocessorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - } - public static final class Coprocessor extends - com.google.protobuf.GeneratedMessage - implements CoprocessorOrBuilder { - // Use Coprocessor.newBuilder() to construct. 
- private Coprocessor(Builder builder) { - super(builder); - } - private Coprocessor(boolean noInit) {} - - private static final Coprocessor defaultInstance; - public static Coprocessor getDefaultInstance() { - return defaultInstance; - } - - public Coprocessor getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - name_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * 
hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - 
this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:Coprocessor) - } - - static { - defaultInstance = new Coprocessor(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Coprocessor) - } - - public interface NameStringPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - - // required string value = 2; - boolean hasValue(); - String getValue(); - } - public static final class NameStringPair extends - com.google.protobuf.GeneratedMessage - implements NameStringPairOrBuilder { - // Use NameStringPair.newBuilder() to construct. 
- private NameStringPair(Builder builder) { - super(builder); - } - private NameStringPair(boolean noInit) {} - - private static final NameStringPair defaultInstance; - public static NameStringPair getDefaultInstance() { - return defaultInstance; - } - - public NameStringPair getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private java.lang.Object value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getValue() { - java.lang.Object ref = value_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - value_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - name_ = ""; - value_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getValueBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, 
getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getValueBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void 
setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // required string value = 2; - private java.lang.Object value_ = ""; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getValue() { - java.lang.Object ref = value_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - value_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setValue(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - void setValue(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:NameStringPair) - } - - static { - defaultInstance = new NameStringPair(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:NameStringPair) - } - - public interface NameBytesPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - - // optional bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class NameBytesPair extends - com.google.protobuf.GeneratedMessage - implements NameBytesPairOrBuilder { - // Use NameBytesPair.newBuilder() to construct. - private NameBytesPair(Builder builder) { - super(builder); - } - private NameBytesPair(boolean noInit) {} - - private static final NameBytesPair defaultInstance; - public static NameBytesPair getDefaultInstance() { - return defaultInstance; - } - - public NameBytesPair getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - 
private void initFields() { - name_ = ""; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // optional bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:NameBytesPair) - } - - static { - defaultInstance = new NameBytesPair(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:NameBytesPair) - } - - public interface BytesBytesPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes first = 1; - boolean hasFirst(); - com.google.protobuf.ByteString getFirst(); - - // required bytes second = 2; - boolean hasSecond(); - com.google.protobuf.ByteString getSecond(); - } - public static final class BytesBytesPair extends - com.google.protobuf.GeneratedMessage - implements BytesBytesPairOrBuilder { - // Use BytesBytesPair.newBuilder() to construct. 
- private BytesBytesPair(Builder builder) { - super(builder); - } - private BytesBytesPair(boolean noInit) {} - - private static final BytesBytesPair defaultInstance; - public static BytesBytesPair getDefaultInstance() { - return defaultInstance; - } - - public BytesBytesPair getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable; - } - - private int bitField0_; - // required bytes first = 1; - public static final int FIRST_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString first_; - public boolean hasFirst() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFirst() { - return first_; - } - - // required bytes second = 2; - public static final int SECOND_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString second_; - public boolean hasSecond() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSecond() { - return second_; - } - - private void initFields() { - first_ = com.google.protobuf.ByteString.EMPTY; - second_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFirst()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasSecond()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, first_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, second_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, first_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, second_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) obj; - - boolean result = true; - result = result && (hasFirst() == other.hasFirst()); - if (hasFirst()) { - result = result && getFirst() - .equals(other.getFirst()); - } - result = result && 
(hasSecond() == other.hasSecond()); - if (hasSecond()) { - result = result && getSecond() - .equals(other.getSecond()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFirst()) { - hash = (37 * hash) + FIRST_FIELD_NUMBER; - hash = (53 * hash) + getFirst().hashCode(); - } - if (hasSecond()) { - hash = (37 * hash) + SECOND_FIELD_NUMBER; - hash = (53 * hash) + getSecond().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - 
public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - first_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - second_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.first_ = first_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; 
- } - result.second_ = second_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()) return this; - if (other.hasFirst()) { - setFirst(other.getFirst()); - } - if (other.hasSecond()) { - setSecond(other.getSecond()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFirst()) { - - return false; - } - if (!hasSecond()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - first_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - second_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes first = 1; - private com.google.protobuf.ByteString first_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFirst() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFirst() { - return first_; - } - public Builder setFirst(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - first_ = value; - onChanged(); - return this; - } - public Builder clearFirst() { - bitField0_ = (bitField0_ & ~0x00000001); - first_ = getDefaultInstance().getFirst(); - onChanged(); - return this; - } - - // required bytes second = 2; - private com.google.protobuf.ByteString second_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasSecond() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSecond() { - return second_; - } - public Builder setSecond(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - second_ = value; - onChanged(); - return this; - } - public Builder clearSecond() { - bitField0_ = (bitField0_ & ~0x00000002); - second_ = getDefaultInstance().getSecond(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:BytesBytesPair) - } - - static { - defaultInstance = new BytesBytesPair(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BytesBytesPair) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TableSchema_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_TableSchema_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TableSchema_Attribute_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TableSchema_Attribute_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnFamilySchema_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnFamilySchema_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnFamilySchema_Attribute_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionSpecifier_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionSpecifier_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionLoad_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionLoad_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ServerLoad_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ServerLoad_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TimeRange_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TimeRange_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Filter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Filter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_KeyValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_KeyValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ServerName_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ServerName_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Coprocessor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Coprocessor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_NameStringPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_NameStringPair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_NameBytesPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_NameBytesPair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BytesBytesPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BytesBytesPair_fieldAccessorTable; - - public static 
com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\013hbase.proto\"\236\001\n\013TableSchema\022\014\n\004name\030\001 " + - "\001(\014\022*\n\nattributes\030\002 \003(\0132\026.TableSchema.At" + - "tribute\022+\n\016columnFamilies\030\003 \003(\0132\023.Column" + - "FamilySchema\032(\n\tAttribute\022\014\n\004name\030\001 \002(\014\022" + - "\r\n\005value\030\002 \002(\014\"\177\n\022ColumnFamilySchema\022\014\n\004" + - "name\030\001 \002(\014\0221\n\nattributes\030\002 \003(\0132\035.ColumnF" + - "amilySchema.Attribute\032(\n\tAttribute\022\014\n\004na" + - "me\030\001 \002(\014\022\r\n\005value\030\002 \002(\014\"s\n\nRegionInfo\022\020\n" + - "\010regionId\030\001 \002(\004\022\021\n\ttableName\030\002 \002(\014\022\020\n\010st" + - "artKey\030\003 \001(\014\022\016\n\006endKey\030\004 \001(\014\022\017\n\007offline\030", - "\005 \001(\010\022\r\n\005split\030\006 \001(\010\"\225\001\n\017RegionSpecifier" + - "\0222\n\004type\030\001 \002(\0162$.RegionSpecifier.RegionS" + - "pecifierType\022\r\n\005value\030\002 \002(\014\"?\n\023RegionSpe" + - "cifierType\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCODED_R" + - "EGION_NAME\020\002\"\324\003\n\nRegionLoad\022)\n\017regionSpe" + - "cifier\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006stores" + - "\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(\r\022\037\n\027storeUncom" + - "pressedSizeMB\030\004 \001(\r\022\027\n\017storefileSizeMB\030\005" + - " \001(\r\022\026\n\016memstoreSizeMB\030\006 \001(\r\022\034\n\024storefil" + - "eIndexSizeMB\030\007 \001(\r\022\031\n\021readRequestsCount\030", - "\010 \001(\004\022\032\n\022writeRequestsCount\030\t \001(\004\022\032\n\022tot" + - "alCompactingKVs\030\n \001(\004\022\033\n\023currentCompacte" + - "dKVs\030\013 \001(\004\022\027\n\017rootIndexSizeKB\030\014 \001(\r\022\036\n\026t" + - "otalStaticIndexSizeKB\030\r \001(\r\022\036\n\026totalStat" + - "icBloomSizeKB\030\016 \001(\r\022\"\n\014coprocessors\030\017 \003(" + - "\0132\014.Coprocessor\022\032\n\022completeSequenceId\030\020 " + - "\001(\004\"\372\001\n\nServerLoad\022\030\n\020numberOfRequests\030\001" + - " \001(\r\022\035\n\025totalNumberOfRequests\030\002 \001(\r\022\022\n\nu" + - "sedHeapMB\030\003 \001(\r\022\021\n\tmaxHeapMB\030\004 \001(\r\022 \n\013re" + - "gionLoads\030\005 \003(\0132\013.RegionLoad\022\"\n\014coproces", - "sors\030\006 \003(\0132\014.Coprocessor\022\027\n\017reportStartT" + - "ime\030\007 \001(\004\022\025\n\rreportEndTime\030\010 \001(\004\022\026\n\016info" + - "ServerPort\030\t \001(\r\"%\n\tTimeRange\022\014\n\004from\030\001 " + - "\001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Filter\022\014\n\004name\030\001 \002(\t\022" + - "\030\n\020serializedFilter\030\002 \001(\014\"w\n\010KeyValue\022\013\n" + - "\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifier\030" + - "\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004\022\031\n\007keyType\030\005 \001(" + - "\0162\010.KeyType\022\r\n\005value\030\006 \001(\014\"?\n\nServerName" + - "\022\020\n\010hostName\030\001 \002(\t\022\014\n\004port\030\002 \001(\r\022\021\n\tstar" + - "tCode\030\003 \001(\004\"\033\n\013Coprocessor\022\014\n\004name\030\001 \002(\t", - "\"-\n\016NameStringPair\022\014\n\004name\030\001 \002(\t\022\r\n\005valu" + - "e\030\002 \002(\t\",\n\rNameBytesPair\022\014\n\004name\030\001 \002(\t\022\r" + - 
"\n\005value\030\002 \001(\014\"/\n\016BytesBytesPair\022\r\n\005first" + - "\030\001 \002(\014\022\016\n\006second\030\002 \002(\014*r\n\013CompareType\022\010\n" + - "\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n" + - "\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GR" + - "EATER\020\005\022\t\n\005NO_OP\020\006*_\n\007KeyType\022\013\n\007MINIMUM" + - "\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN" + - "\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*o" + - "rg.apache.hadoop.hbase.protobuf.generate", - "dB\013HBaseProtosH\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_TableSchema_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_TableSchema_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TableSchema_descriptor, - new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); - internal_static_TableSchema_Attribute_descriptor = - internal_static_TableSchema_descriptor.getNestedTypes().get(0); - internal_static_TableSchema_Attribute_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TableSchema_Attribute_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Attribute.Builder.class); - internal_static_ColumnFamilySchema_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_ColumnFamilySchema_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnFamilySchema_descriptor, - new java.lang.String[] { "Name", "Attributes", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); - internal_static_ColumnFamilySchema_Attribute_descriptor = - internal_static_ColumnFamilySchema_descriptor.getNestedTypes().get(0); - internal_static_ColumnFamilySchema_Attribute_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnFamilySchema_Attribute_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Attribute.Builder.class); - internal_static_RegionInfo_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_RegionInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionInfo_descriptor, - new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); - 
internal_static_RegionSpecifier_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_RegionSpecifier_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionSpecifier_descriptor, - new java.lang.String[] { "Type", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); - internal_static_RegionLoad_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_RegionLoad_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionLoad_descriptor, - new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "Coprocessors", "CompleteSequenceId", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder.class); - internal_static_ServerLoad_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_ServerLoad_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ServerLoad_descriptor, - new java.lang.String[] { "NumberOfRequests", "TotalNumberOfRequests", "UsedHeapMB", "MaxHeapMB", "RegionLoads", "Coprocessors", "ReportStartTime", "ReportEndTime", "InfoServerPort", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder.class); - internal_static_TimeRange_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_TimeRange_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TimeRange_descriptor, - new java.lang.String[] { "From", "To", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); - internal_static_Filter_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_Filter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Filter_descriptor, - new java.lang.String[] { "Name", "SerializedFilter", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Filter.Builder.class); - internal_static_KeyValue_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_KeyValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_KeyValue_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder.class); - internal_static_ServerName_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_ServerName_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ServerName_descriptor, - new java.lang.String[] { "HostName", "Port", "StartCode", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); - internal_static_Coprocessor_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_Coprocessor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Coprocessor_descriptor, - new java.lang.String[] { "Name", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); - internal_static_NameStringPair_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_NameStringPair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_NameStringPair_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); - internal_static_NameBytesPair_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_NameBytesPair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_NameBytesPair_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); - internal_static_BytesBytesPair_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_BytesBytesPair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BytesBytesPair_descriptor, - new java.lang.String[] { "First", "Second", }, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java deleted file mode 100644 index 48243e6..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java +++ /dev/null @@ -1,424 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: LoadBalancer.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class LoadBalancerProtos { - private LoadBalancerProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface LoadBalancerStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bool balancerOn = 1; - boolean hasBalancerOn(); - boolean getBalancerOn(); - } - public static final class LoadBalancerState extends - com.google.protobuf.GeneratedMessage - implements LoadBalancerStateOrBuilder { - // Use LoadBalancerState.newBuilder() to construct. 
- private LoadBalancerState(Builder builder) { - super(builder); - } - private LoadBalancerState(boolean noInit) {} - - private static final LoadBalancerState defaultInstance; - public static LoadBalancerState getDefaultInstance() { - return defaultInstance; - } - - public LoadBalancerState getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable; - } - - private int bitField0_; - // optional bool balancerOn = 1; - public static final int BALANCERON_FIELD_NUMBER = 1; - private boolean balancerOn_; - public boolean hasBalancerOn() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getBalancerOn() { - return balancerOn_; - } - - private void initFields() { - balancerOn_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, balancerOn_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, balancerOn_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) obj; - - boolean result = true; - result = result && (hasBalancerOn() == other.hasBalancerOn()); - if (hasBalancerOn()) { - result = result && (getBalancerOn() - == other.getBalancerOn()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasBalancerOn()) { - hash = (37 * hash) + BALANCERON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBalancerOn()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); 
- } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - 
getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - balancerOn_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState build() { - org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.balancerOn_ = balancerOn_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance()) return this; - if (other.hasBalancerOn()) { - setBalancerOn(other.getBalancerOn()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - 
return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - balancerOn_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional bool balancerOn = 1; - private boolean balancerOn_ ; - public boolean hasBalancerOn() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getBalancerOn() { - return balancerOn_; - } - public Builder setBalancerOn(boolean value) { - bitField0_ |= 0x00000001; - balancerOn_ = value; - onChanged(); - return this; - } - public Builder clearBalancerOn() { - bitField0_ = (bitField0_ & ~0x00000001); - balancerOn_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:LoadBalancerState) - } - - static { - defaultInstance = new LoadBalancerState(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:LoadBalancerState) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_LoadBalancerState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_LoadBalancerState_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\022LoadBalancer.proto\"\'\n\021LoadBalancerStat" + - "e\022\022\n\nbalancerOn\030\001 \001(\010BE\n*org.apache.hado" + - "op.hbase.protobuf.generatedB\022LoadBalance" + - "rProtosH\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_LoadBalancerState_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_LoadBalancerState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_LoadBalancerState_descriptor, - new java.lang.String[] { "BalancerOn", }, - org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, - org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java deleted file mode 100644 index 71be12a..0000000 --- 
hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java +++ /dev/null @@ -1,16419 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: MasterAdmin.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class MasterAdminProtos { - private MasterAdminProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface AddColumnRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required .ColumnFamilySchema columnFamilies = 2; - boolean hasColumnFamilies(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); - } - public static final class AddColumnRequest extends - com.google.protobuf.GeneratedMessage - implements AddColumnRequestOrBuilder { - // Use AddColumnRequest.newBuilder() to construct. - private AddColumnRequest(Builder builder) { - super(builder); - } - private AddColumnRequest(boolean noInit) {} - - private static final AddColumnRequest defaultInstance; - public static AddColumnRequest getDefaultInstance() { - return defaultInstance; - } - - public AddColumnRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required .ColumnFamilySchema columnFamilies = 2; - public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; - public boolean hasColumnFamilies() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { - return columnFamilies_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { - return columnFamilies_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasColumnFamilies()) { - memoizedIsInitialized = 0; - return false; - } - if (!getColumnFamilies().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - 
} - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, columnFamilies_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, columnFamilies_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasColumnFamilies() == other.hasColumnFamilies()); - if (hasColumnFamilies()) { - result = result && getColumnFamilies() - .equals(other.getColumnFamilies()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasColumnFamilies()) { - hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamilies().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { 
- return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnFamiliesFieldBuilder(); - } - } - private static Builder create() { - return 
new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - } else { - columnFamiliesBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (columnFamiliesBuilder_ == null) { - result.columnFamilies_ = columnFamilies_; - } else { - result.columnFamilies_ = columnFamiliesBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasColumnFamilies()) { - mergeColumnFamilies(other.getColumnFamilies()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - if (!hasColumnFamilies()) { - - return false; - } - if (!getColumnFamilies().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); - if (hasColumnFamilies()) { - subBuilder.mergeFrom(getColumnFamilies()); - } - input.readMessage(subBuilder, extensionRegistry); - setColumnFamilies(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required .ColumnFamilySchema columnFamilies = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; - public boolean hasColumnFamilies() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { - if (columnFamiliesBuilder_ == null) { - return columnFamilies_; - } else { - return columnFamiliesBuilder_.getMessage(); - } - } - public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { - if (columnFamiliesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - columnFamilies_ = value; - onChanged(); - } else { - columnFamiliesBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setColumnFamilies( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = builderForValue.build(); - onChanged(); - } else { - columnFamiliesBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder 
mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { - if (columnFamiliesBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { - columnFamilies_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); - } else { - columnFamilies_ = value; - } - onChanged(); - } else { - columnFamiliesBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearColumnFamilies() { - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - onChanged(); - } else { - columnFamiliesBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getColumnFamiliesFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { - if (columnFamiliesBuilder_ != null) { - return columnFamiliesBuilder_.getMessageOrBuilder(); - } else { - return columnFamilies_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> - getColumnFamiliesFieldBuilder() { - if (columnFamiliesBuilder_ == null) { - columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnFamilies_, - getParentForChildren(), - isClean()); - columnFamilies_ = null; - } - return columnFamiliesBuilder_; - } - - // @@protoc_insertion_point(builder_scope:AddColumnRequest) - } - - static { - defaultInstance = new AddColumnRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AddColumnRequest) - } - - public interface AddColumnResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class AddColumnResponse extends - com.google.protobuf.GeneratedMessage - implements AddColumnResponseOrBuilder { - // Use AddColumnResponse.newBuilder() to construct. 
- private AddColumnResponse(Builder builder) { - super(builder); - } - private AddColumnResponse(boolean noInit) {} - - private static final AddColumnResponse defaultInstance; - public static AddColumnResponse getDefaultInstance() { - return defaultInstance; - } - - public AddColumnResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public 
Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:AddColumnResponse) - } - - static { - defaultInstance = new AddColumnResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AddColumnResponse) - } - - public interface DeleteColumnRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required bytes columnName = 2; - boolean hasColumnName(); - com.google.protobuf.ByteString getColumnName(); - } - 
public static final class DeleteColumnRequest extends - com.google.protobuf.GeneratedMessage - implements DeleteColumnRequestOrBuilder { - // Use DeleteColumnRequest.newBuilder() to construct. - private DeleteColumnRequest(Builder builder) { - super(builder); - } - private DeleteColumnRequest(boolean noInit) {} - - private static final DeleteColumnRequest defaultInstance; - public static DeleteColumnRequest getDefaultInstance() { - return defaultInstance; - } - - public DeleteColumnRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required bytes columnName = 2; - public static final int COLUMNNAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString columnName_; - public boolean hasColumnName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getColumnName() { - return columnName_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - columnName_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasColumnName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, columnName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, columnName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasColumnName() == other.hasColumnName()); - if (hasColumnName()) { - result = result && getColumnName() - .equals(other.getColumnName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasColumnName()) { - hash = (37 * hash) + COLUMNNAME_FIELD_NUMBER; - hash = (53 * hash) + getColumnName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - columnName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.columnName_ = columnName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasColumnName()) { - setColumnName(other.getColumnName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - if (!hasColumnName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - columnName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required bytes columnName = 2; - private com.google.protobuf.ByteString columnName_ = 
com.google.protobuf.ByteString.EMPTY; - public boolean hasColumnName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getColumnName() { - return columnName_; - } - public Builder setColumnName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - columnName_ = value; - onChanged(); - return this; - } - public Builder clearColumnName() { - bitField0_ = (bitField0_ & ~0x00000002); - columnName_ = getDefaultInstance().getColumnName(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:DeleteColumnRequest) - } - - static { - defaultInstance = new DeleteColumnRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DeleteColumnRequest) - } - - public interface DeleteColumnResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class DeleteColumnResponse extends - com.google.protobuf.GeneratedMessage - implements DeleteColumnResponseOrBuilder { - // Use DeleteColumnResponse.newBuilder() to construct. - private DeleteColumnResponse(Builder builder) { - super(builder); - } - private DeleteColumnResponse(boolean noInit) {} - - private static final DeleteColumnResponse defaultInstance; - public static DeleteColumnResponse getDefaultInstance() { - return defaultInstance; - } - - public DeleteColumnResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * 
hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return 
this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:DeleteColumnResponse) - } - - static { - defaultInstance = new DeleteColumnResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DeleteColumnResponse) - } - - public interface ModifyColumnRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required .ColumnFamilySchema columnFamilies = 2; - boolean hasColumnFamilies(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); - } - public static final class ModifyColumnRequest extends - com.google.protobuf.GeneratedMessage - implements ModifyColumnRequestOrBuilder { - // Use ModifyColumnRequest.newBuilder() to construct. - private ModifyColumnRequest(Builder builder) { - super(builder); - } - private ModifyColumnRequest(boolean noInit) {} - - private static final ModifyColumnRequest defaultInstance; - public static ModifyColumnRequest getDefaultInstance() { - return defaultInstance; - } - - public ModifyColumnRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required .ColumnFamilySchema columnFamilies = 2; - public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; - public boolean hasColumnFamilies() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { - return columnFamilies_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { - return columnFamilies_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - columnFamilies_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasColumnFamilies()) { - memoizedIsInitialized = 0; - return false; - } - if (!getColumnFamilies().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, columnFamilies_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, columnFamilies_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasColumnFamilies() == other.hasColumnFamilies()); - if (hasColumnFamilies()) { - result = result && getColumnFamilies() - .equals(other.getColumnFamilies()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasColumnFamilies()) { - hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamilies().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnFamiliesFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - } else { - columnFamiliesBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (columnFamiliesBuilder_ == null) { - result.columnFamilies_ = columnFamilies_; - } else { - result.columnFamilies_ = columnFamiliesBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasColumnFamilies()) { - mergeColumnFamilies(other.getColumnFamilies()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - if (!hasColumnFamilies()) { - - return false; - } - if (!getColumnFamilies().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); - if (hasColumnFamilies()) { - subBuilder.mergeFrom(getColumnFamilies()); - } - input.readMessage(subBuilder, extensionRegistry); - setColumnFamilies(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required .ColumnFamilySchema columnFamilies = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; - public boolean hasColumnFamilies() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { - if (columnFamiliesBuilder_ == null) { - return columnFamilies_; - } else { - return columnFamiliesBuilder_.getMessage(); - } - } - public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema 
value) { - if (columnFamiliesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - columnFamilies_ = value; - onChanged(); - } else { - columnFamiliesBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setColumnFamilies( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = builderForValue.build(); - onChanged(); - } else { - columnFamiliesBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { - if (columnFamiliesBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { - columnFamilies_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); - } else { - columnFamilies_ = value; - } - onChanged(); - } else { - columnFamiliesBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearColumnFamilies() { - if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - onChanged(); - } else { - columnFamiliesBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getColumnFamiliesFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { - if (columnFamiliesBuilder_ != null) { - return columnFamiliesBuilder_.getMessageOrBuilder(); - } else { - return columnFamilies_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> - getColumnFamiliesFieldBuilder() { - if (columnFamiliesBuilder_ == null) { - columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnFamilies_, - getParentForChildren(), - isClean()); - columnFamilies_ = null; - } - return columnFamiliesBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ModifyColumnRequest) - } - - static { - defaultInstance = new ModifyColumnRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ModifyColumnRequest) - } - - public interface ModifyColumnResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class ModifyColumnResponse extends - com.google.protobuf.GeneratedMessage - implements ModifyColumnResponseOrBuilder { - // Use ModifyColumnResponse.newBuilder() to construct. 
- private ModifyColumnResponse(Builder builder) { - super(builder); - } - private ModifyColumnResponse(boolean noInit) {} - - private static final ModifyColumnResponse defaultInstance; - public static ModifyColumnResponse getDefaultInstance() { - return defaultInstance; - } - - public ModifyColumnResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ModifyColumnResponse) - } - - static { - defaultInstance = new ModifyColumnResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ModifyColumnResponse) - } - - public interface MoveRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier 
region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional .ServerName destServerName = 2; - boolean hasDestServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder(); - } - public static final class MoveRegionRequest extends - com.google.protobuf.GeneratedMessage - implements MoveRegionRequestOrBuilder { - // Use MoveRegionRequest.newBuilder() to construct. - private MoveRegionRequest(Builder builder) { - super(builder); - } - private MoveRegionRequest(boolean noInit) {} - - private static final MoveRegionRequest defaultInstance; - public static MoveRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public MoveRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional .ServerName destServerName = 2; - public static final int DESTSERVERNAME_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_; - public boolean hasDestServerName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { - return destServerName_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { - return destServerName_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (hasDestServerName()) { - if (!getDestServerName().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - 
output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, destServerName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, destServerName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasDestServerName() == other.hasDestServerName()); - if (hasDestServerName()) { - result = result && getDestServerName() - .equals(other.getDestServerName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasDestServerName()) { - hash = (37 * hash) + DESTSERVERNAME_FIELD_NUMBER; - hash = (53 * hash) + getDestServerName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest 
parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getDestServerNameFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (destServerNameBuilder_ == null) { - destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - destServerNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (destServerNameBuilder_ == null) { - result.destServerName_ = destServerName_; - } else { - result.destServerName_ = destServerNameBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasDestServerName()) { - mergeDestServerName(other.getDestServerName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (hasDestServerName()) { - if 
(!getDestServerName().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasDestServerName()) { - subBuilder.mergeFrom(getDestServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setDestServerName(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - 
return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional .ServerName destServerName = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_; - public boolean hasDestServerName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { - if (destServerNameBuilder_ == null) { - return destServerName_; - } else { - return destServerNameBuilder_.getMessage(); - } - } - public Builder setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (destServerNameBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - destServerName_ = value; - onChanged(); - } else { - destServerNameBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setDestServerName( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (destServerNameBuilder_ == null) { - destServerName_ = builderForValue.build(); - onChanged(); - } else { - destServerNameBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (destServerNameBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - destServerName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - destServerName_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destServerName_).mergeFrom(value).buildPartial(); - } else { - destServerName_ = value; - } - 
onChanged(); - } else { - destServerNameBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearDestServerName() { - if (destServerNameBuilder_ == null) { - destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - destServerNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestServerNameBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getDestServerNameFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { - if (destServerNameBuilder_ != null) { - return destServerNameBuilder_.getMessageOrBuilder(); - } else { - return destServerName_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getDestServerNameFieldBuilder() { - if (destServerNameBuilder_ == null) { - destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - destServerName_, - getParentForChildren(), - isClean()); - destServerName_ = null; - } - return destServerNameBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MoveRegionRequest) - } - - static { - defaultInstance = new MoveRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MoveRegionRequest) - } - - public interface MoveRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class MoveRegionResponse extends - com.google.protobuf.GeneratedMessage - implements MoveRegionResponseOrBuilder { - // Use MoveRegionResponse.newBuilder() to construct. 
- private MoveRegionResponse(Builder builder) { - super(builder); - } - private MoveRegionResponse(boolean noInit) {} - - private static final MoveRegionResponse defaultInstance; - public static MoveRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public MoveRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - 
return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - 
} - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:MoveRegionResponse) - } - - static { - defaultInstance = new MoveRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MoveRegionResponse) - } - - public interface AssignRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - } - public static final class AssignRegionRequest extends - com.google.protobuf.GeneratedMessage - implements AssignRegionRequestOrBuilder { - // Use AssignRegionRequest.newBuilder() to construct. - private AssignRegionRequest(Builder builder) { - super(builder); - } - private AssignRegionRequest(boolean noInit) {} - - private static final AssignRegionRequest defaultInstance; - public static AssignRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public AssignRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == 
other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder 
newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 
0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = 
builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:AssignRegionRequest) - } - - static { - defaultInstance = new AssignRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AssignRegionRequest) - } - - public interface AssignRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class AssignRegionResponse extends - com.google.protobuf.GeneratedMessage - implements AssignRegionResponseOrBuilder { - // Use AssignRegionResponse.newBuilder() to construct. 
- private AssignRegionResponse(Builder builder) { - super(builder); - } - private AssignRegionResponse(boolean noInit) {} - - private static final AssignRegionResponse defaultInstance; - public static AssignRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public AssignRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:AssignRegionResponse) - } - - static { - defaultInstance = new AssignRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AssignRegionResponse) - } - - public interface UnassignRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier 
region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional bool force = 2 [default = false]; - boolean hasForce(); - boolean getForce(); - } - public static final class UnassignRegionRequest extends - com.google.protobuf.GeneratedMessage - implements UnassignRegionRequestOrBuilder { - // Use UnassignRegionRequest.newBuilder() to construct. - private UnassignRegionRequest(Builder builder) { - super(builder); - } - private UnassignRegionRequest(boolean noInit) {} - - private static final UnassignRegionRequest defaultInstance; - public static UnassignRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public UnassignRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional bool force = 2 [default = false]; - public static final int FORCE_FIELD_NUMBER = 2; - private boolean force_; - public boolean hasForce() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getForce() { - return force_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - force_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, force_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, force_); - } - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasForce() == other.hasForce()); - if (hasForce()) { - result = result && (getForce() - == other.getForce()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasForce()) { - hash = (37 * hash) + FORCE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getForce()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - 
return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - force_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.force_ = force_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasForce()) { - setForce(other.getForce()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - 
setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - force_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional bool force = 2 [default = false]; - private boolean force_ ; - public boolean hasForce() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getForce() { - return force_; - } - public Builder setForce(boolean value) { - bitField0_ |= 0x00000002; - force_ = value; - onChanged(); - return this; - } - public Builder clearForce() { - bitField0_ = (bitField0_ & ~0x00000002); - force_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:UnassignRegionRequest) - } - - static { - defaultInstance = new UnassignRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UnassignRegionRequest) - } - - public interface UnassignRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class UnassignRegionResponse extends - com.google.protobuf.GeneratedMessage - implements UnassignRegionResponseOrBuilder { - // Use UnassignRegionResponse.newBuilder() to construct. - private UnassignRegionResponse(Builder builder) { - super(builder); - } - private UnassignRegionResponse(boolean noInit) {} - - private static final UnassignRegionResponse defaultInstance; - public static UnassignRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public UnassignRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int 
hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - 
@java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:UnassignRegionResponse) - } - - static { - defaultInstance = new UnassignRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UnassignRegionResponse) - } - - public interface OfflineRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - } - public static final class OfflineRegionRequest extends - com.google.protobuf.GeneratedMessage - implements OfflineRegionRequestOrBuilder { - // Use OfflineRegionRequest.newBuilder() to construct. - private OfflineRegionRequest(Builder builder) { - super(builder); - } - private OfflineRegionRequest(boolean noInit) {} - - private static final OfflineRegionRequest defaultInstance; - public static OfflineRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public OfflineRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - 
return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - 
getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - 
} - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:OfflineRegionRequest) - } - - static { - defaultInstance = new OfflineRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OfflineRegionRequest) - } - - public interface OfflineRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class OfflineRegionResponse extends - com.google.protobuf.GeneratedMessage - implements OfflineRegionResponseOrBuilder { - // Use OfflineRegionResponse.newBuilder() to construct. - private OfflineRegionResponse(Builder builder) { - super(builder); - } - private OfflineRegionResponse(boolean noInit) {} - - private static final OfflineRegionResponse defaultInstance; - public static OfflineRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public OfflineRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponseOrBuilder { - public static final 
com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:OfflineRegionResponse) - } - - static { - defaultInstance = new OfflineRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OfflineRegionResponse) - } - - public interface CreateTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .TableSchema tableSchema = 1; - boolean hasTableSchema(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - - // repeated bytes splitKeys = 2; - java.util.List getSplitKeysList(); - int getSplitKeysCount(); - com.google.protobuf.ByteString getSplitKeys(int index); - } - public static final class CreateTableRequest extends - com.google.protobuf.GeneratedMessage - implements CreateTableRequestOrBuilder { - // Use CreateTableRequest.newBuilder() to construct. - private CreateTableRequest(Builder builder) { - super(builder); - } - private CreateTableRequest(boolean noInit) {} - - private static final CreateTableRequest defaultInstance; - public static CreateTableRequest getDefaultInstance() { - return defaultInstance; - } - - public CreateTableRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .TableSchema tableSchema = 1; - public static final int TABLESCHEMA_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; - public boolean hasTableSchema() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; - } - - // repeated bytes splitKeys = 2; - public static final int SPLITKEYS_FIELD_NUMBER = 2; - private java.util.List splitKeys_; - public java.util.List - getSplitKeysList() { - return splitKeys_; - } - public int getSplitKeysCount() { - return splitKeys_.size(); - } - public com.google.protobuf.ByteString getSplitKeys(int index) { - return splitKeys_.get(index); - } - - private void initFields() { - tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - splitKeys_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableSchema()) { - 
memoizedIsInitialized = 0; - return false; - } - if (!getTableSchema().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableSchema_); - } - for (int i = 0; i < splitKeys_.size(); i++) { - output.writeBytes(2, splitKeys_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableSchema_); - } - { - int dataSize = 0; - for (int i = 0; i < splitKeys_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(splitKeys_.get(i)); - } - size += dataSize; - size += 1 * getSplitKeysList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) obj; - - boolean result = true; - result = result && (hasTableSchema() == other.hasTableSchema()); - if (hasTableSchema()) { - result = result && getTableSchema() - .equals(other.getTableSchema()); - } - result = result && getSplitKeysList() - .equals(other.getSplitKeysList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableSchema()) { - hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; - hash = (53 * hash) + getTableSchema().hashCode(); - } - if (getSplitKeysCount() > 0) { - hash = (37 * hash) + SPLITKEYS_FIELD_NUMBER; - hash = (53 * hash) + getSplitKeysList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - 
super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTableSchemaFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } else { - tableSchemaBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - splitKeys_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (tableSchemaBuilder_ == null) { - result.tableSchema_ = tableSchema_; - } else { - result.tableSchema_ = tableSchemaBuilder_.build(); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.splitKeys_ = splitKeys_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance()) return this; - if (other.hasTableSchema()) { - mergeTableSchema(other.getTableSchema()); - } - if (!other.splitKeys_.isEmpty()) { - if (splitKeys_.isEmpty()) { - splitKeys_ = other.splitKeys_; - bitField0_ = 
(bitField0_ & ~0x00000002); - } else { - ensureSplitKeysIsMutable(); - splitKeys_.addAll(other.splitKeys_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableSchema()) { - - return false; - } - if (!getTableSchema().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); - if (hasTableSchema()) { - subBuilder.mergeFrom(getTableSchema()); - } - input.readMessage(subBuilder, extensionRegistry); - setTableSchema(subBuilder.buildPartial()); - break; - } - case 18: { - ensureSplitKeysIsMutable(); - splitKeys_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required .TableSchema tableSchema = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; - public boolean hasTableSchema() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - if (tableSchemaBuilder_ == null) { - return tableSchema_; - } else { - return tableSchemaBuilder_.getMessage(); - } - } - public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - tableSchema_ = value; - onChanged(); - } else { - tableSchemaBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setTableSchema( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { - if (tableSchemaBuilder_ == null) { - tableSchema_ = builderForValue.build(); - onChanged(); - } else { - tableSchemaBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { - tableSchema_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); - } else { - tableSchema_ = value; - } - onChanged(); - 
} else { - tableSchemaBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearTableSchema() { - if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - onChanged(); - } else { - tableSchemaBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getTableSchemaFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - if (tableSchemaBuilder_ != null) { - return tableSchemaBuilder_.getMessageOrBuilder(); - } else { - return tableSchema_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> - getTableSchemaFieldBuilder() { - if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, - getParentForChildren(), - isClean()); - tableSchema_ = null; - } - return tableSchemaBuilder_; - } - - // repeated bytes splitKeys = 2; - private java.util.List splitKeys_ = java.util.Collections.emptyList();; - private void ensureSplitKeysIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - splitKeys_ = new java.util.ArrayList(splitKeys_); - bitField0_ |= 0x00000002; - } - } - public java.util.List - getSplitKeysList() { - return java.util.Collections.unmodifiableList(splitKeys_); - } - public int getSplitKeysCount() { - return splitKeys_.size(); - } - public com.google.protobuf.ByteString getSplitKeys(int index) { - return splitKeys_.get(index); - } - public Builder setSplitKeys( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureSplitKeysIsMutable(); - splitKeys_.set(index, value); - onChanged(); - return this; - } - public Builder addSplitKeys(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureSplitKeysIsMutable(); - splitKeys_.add(value); - onChanged(); - return this; - } - public Builder addAllSplitKeys( - java.lang.Iterable values) { - ensureSplitKeysIsMutable(); - super.addAll(values, splitKeys_); - onChanged(); - return this; - } - public Builder clearSplitKeys() { - splitKeys_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CreateTableRequest) - } - - static { - defaultInstance = new CreateTableRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CreateTableRequest) - } - - public interface CreateTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class CreateTableResponse extends - com.google.protobuf.GeneratedMessage - implements CreateTableResponseOrBuilder { - // Use CreateTableResponse.newBuilder() to construct. 
- private CreateTableResponse(Builder builder) { - super(builder); - } - private CreateTableResponse(boolean noInit) {} - - private static final CreateTableResponse defaultInstance; - public static CreateTableResponse getDefaultInstance() { - return defaultInstance; - } - - public CreateTableResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } 
- } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:CreateTableResponse) - } - - static { - defaultInstance = new CreateTableResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CreateTableResponse) - } - - public interface DeleteTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - } - 
public static final class DeleteTableRequest extends - com.google.protobuf.GeneratedMessage - implements DeleteTableRequestOrBuilder { - // Use DeleteTableRequest.newBuilder() to construct. - private DeleteTableRequest(Builder builder) { - super(builder); - } - private DeleteTableRequest(boolean noInit) {} - - private static final DeleteTableRequest defaultInstance; - public static DeleteTableRequest getDefaultInstance() { - return defaultInstance; - } - - public DeleteTableRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - 
hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:DeleteTableRequest) - } - - static { - defaultInstance = new DeleteTableRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DeleteTableRequest) - } - - public interface DeleteTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class DeleteTableResponse extends - com.google.protobuf.GeneratedMessage - implements DeleteTableResponseOrBuilder { - // Use DeleteTableResponse.newBuilder() to construct. 
- private DeleteTableResponse(Builder builder) { - super(builder); - } - private DeleteTableResponse(boolean noInit) {} - - private static final DeleteTableResponse defaultInstance; - public static DeleteTableResponse getDefaultInstance() { - return defaultInstance; - } - - public DeleteTableResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } 
- } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:DeleteTableResponse) - } - - static { - defaultInstance = new DeleteTableResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DeleteTableResponse) - } - - public interface EnableTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - } - 
public static final class EnableTableRequest extends - com.google.protobuf.GeneratedMessage - implements EnableTableRequestOrBuilder { - // Use EnableTableRequest.newBuilder() to construct. - private EnableTableRequest(Builder builder) { - super(builder); - } - private EnableTableRequest(boolean noInit) {} - - private static final EnableTableRequest defaultInstance; - public static EnableTableRequest getDefaultInstance() { - return defaultInstance; - } - - public EnableTableRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - 
hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:EnableTableRequest) - } - - static { - defaultInstance = new EnableTableRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:EnableTableRequest) - } - - public interface EnableTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class EnableTableResponse extends - com.google.protobuf.GeneratedMessage - implements EnableTableResponseOrBuilder { - // Use EnableTableResponse.newBuilder() to construct. 
- private EnableTableResponse(Builder builder) { - super(builder); - } - private EnableTableResponse(boolean noInit) {} - - private static final EnableTableResponse defaultInstance; - public static EnableTableResponse getDefaultInstance() { - return defaultInstance; - } - - public EnableTableResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } 
- } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:EnableTableResponse) - } - - static { - defaultInstance = new EnableTableResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:EnableTableResponse) - } - - public interface DisableTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - } - 
public static final class DisableTableRequest extends - com.google.protobuf.GeneratedMessage - implements DisableTableRequestOrBuilder { - // Use DisableTableRequest.newBuilder() to construct. - private DisableTableRequest(Builder builder) { - super(builder); - } - private DisableTableRequest(boolean noInit) {} - - private static final DisableTableRequest defaultInstance; - public static DisableTableRequest getDefaultInstance() { - return defaultInstance; - } - - public DisableTableRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + 
getTableName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:DisableTableRequest) - } - - static { - defaultInstance = new DisableTableRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DisableTableRequest) - } - - public interface DisableTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class DisableTableResponse extends - com.google.protobuf.GeneratedMessage - implements DisableTableResponseOrBuilder { - // Use DisableTableResponse.newBuilder() to construct. 
- private DisableTableResponse(Builder builder) { - super(builder); - } - private DisableTableResponse(boolean noInit) {} - - private static final DisableTableResponse defaultInstance; - public static DisableTableResponse getDefaultInstance() { - return defaultInstance; - } - - public DisableTableResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:DisableTableResponse) - } - - static { - defaultInstance = new DisableTableResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DisableTableResponse) - } - - public interface ModifyTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; 
- boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required .TableSchema tableSchema = 2; - boolean hasTableSchema(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - } - public static final class ModifyTableRequest extends - com.google.protobuf.GeneratedMessage - implements ModifyTableRequestOrBuilder { - // Use ModifyTableRequest.newBuilder() to construct. - private ModifyTableRequest(Builder builder) { - super(builder); - } - private ModifyTableRequest(boolean noInit) {} - - private static final ModifyTableRequest defaultInstance; - public static ModifyTableRequest getDefaultInstance() { - return defaultInstance; - } - - public ModifyTableRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required .TableSchema tableSchema = 2; - public static final int TABLESCHEMA_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; - public boolean hasTableSchema() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasTableSchema()) { - memoizedIsInitialized = 0; - return false; - } - if (!getTableSchema().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableSchema_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - if 
(((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableSchema_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasTableSchema() == other.hasTableSchema()); - if (hasTableSchema()) { - result = result && getTableSchema() - .equals(other.getTableSchema()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasTableSchema()) { - hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; - hash = (53 * hash) + getTableSchema().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTableSchemaFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } else { - tableSchemaBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (tableSchemaBuilder_ == null) { - result.tableSchema_ = tableSchema_; - } else { - result.tableSchema_ = tableSchemaBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasTableSchema()) { - mergeTableSchema(other.getTableSchema()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - if (!hasTableSchema()) { - - return false; - } - if (!getTableSchema().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - 
bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); - if (hasTableSchema()) { - subBuilder.mergeFrom(getTableSchema()); - } - input.readMessage(subBuilder, extensionRegistry); - setTableSchema(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required .TableSchema tableSchema = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; - public boolean hasTableSchema() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - if (tableSchemaBuilder_ == null) { - return tableSchema_; - } else { - return tableSchemaBuilder_.getMessage(); - } - } - public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - tableSchema_ = value; - onChanged(); - } else { - tableSchemaBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setTableSchema( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { - if (tableSchemaBuilder_ == null) { - tableSchema_ = builderForValue.build(); - onChanged(); - } else { - tableSchemaBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { - tableSchema_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); - } else { - tableSchema_ = value; - } - onChanged(); - } else { - tableSchemaBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearTableSchema() { - if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - onChanged(); - } else { - tableSchemaBuilder_.clear(); - } - bitField0_ = 
(bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getTableSchemaFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - if (tableSchemaBuilder_ != null) { - return tableSchemaBuilder_.getMessageOrBuilder(); - } else { - return tableSchema_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> - getTableSchemaFieldBuilder() { - if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, - getParentForChildren(), - isClean()); - tableSchema_ = null; - } - return tableSchemaBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ModifyTableRequest) - } - - static { - defaultInstance = new ModifyTableRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ModifyTableRequest) - } - - public interface ModifyTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class ModifyTableResponse extends - com.google.protobuf.GeneratedMessage - implements ModifyTableResponseOrBuilder { - // Use ModifyTableResponse.newBuilder() to construct. 
- private ModifyTableResponse(Builder builder) { - super(builder); - } - private ModifyTableResponse(boolean noInit) {} - - private static final ModifyTableResponse defaultInstance; - public static ModifyTableResponse getDefaultInstance() { - return defaultInstance; - } - - public ModifyTableResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } 
- } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ModifyTableResponse) - } - - static { - defaultInstance = new ModifyTableResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ModifyTableResponse) - } - - public interface ShutdownRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class ShutdownRequest extends - com.google.protobuf.GeneratedMessage - implements 
ShutdownRequestOrBuilder { - // Use ShutdownRequest.newBuilder() to construct. - private ShutdownRequest(Builder builder) { - super(builder); - } - private ShutdownRequest(boolean noInit) {} - - private static final ShutdownRequest defaultInstance; - public static ShutdownRequest getDefaultInstance() { - return defaultInstance; - } - - public ShutdownRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - 
return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ShutdownRequest) - } - - static { - defaultInstance = new ShutdownRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ShutdownRequest) - } - - public interface ShutdownResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class ShutdownResponse extends - com.google.protobuf.GeneratedMessage - implements ShutdownResponseOrBuilder { - // Use ShutdownResponse.newBuilder() to construct. 
- private ShutdownResponse(Builder builder) { - super(builder); - } - private ShutdownResponse(boolean noInit) {} - - private static final ShutdownResponse defaultInstance; - public static ShutdownResponse getDefaultInstance() { - return defaultInstance; - } - - public ShutdownResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() 
{ - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ShutdownResponse) - } - - static { - defaultInstance = new ShutdownResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ShutdownResponse) - } - - public interface StopMasterRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class StopMasterRequest extends - com.google.protobuf.GeneratedMessage - implements StopMasterRequestOrBuilder { - // Use StopMasterRequest.newBuilder() to construct. 
- private StopMasterRequest(Builder builder) { - super(builder); - } - private StopMasterRequest(boolean noInit) {} - - private static final StopMasterRequest defaultInstance; - public static StopMasterRequest getDefaultInstance() { - return defaultInstance; - } - - public StopMasterRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public 
Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:StopMasterRequest) - } - - static { - defaultInstance = new StopMasterRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:StopMasterRequest) - } - - public interface StopMasterResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class StopMasterResponse extends - com.google.protobuf.GeneratedMessage - implements StopMasterResponseOrBuilder { - // Use StopMasterResponse.newBuilder() to construct. 
- private StopMasterResponse(Builder builder) { - super(builder); - } - private StopMasterResponse(boolean noInit) {} - - private static final StopMasterResponse defaultInstance; - public static StopMasterResponse getDefaultInstance() { - return defaultInstance; - } - - public StopMasterResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - 
return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - 
} - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:StopMasterResponse) - } - - static { - defaultInstance = new StopMasterResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:StopMasterResponse) - } - - public interface BalanceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class BalanceRequest extends - com.google.protobuf.GeneratedMessage - implements BalanceRequestOrBuilder { - // Use BalanceRequest.newBuilder() to construct. 
- private BalanceRequest(Builder builder) { - super(builder); - } - private BalanceRequest(boolean noInit) {} - - private static final BalanceRequest defaultInstance; - public static BalanceRequest getDefaultInstance() { - return defaultInstance; - } - - public BalanceRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) 
- .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - 
return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:BalanceRequest) - } - - static { - defaultInstance = new BalanceRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BalanceRequest) - } - - public interface BalanceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool balancerRan = 1; - boolean hasBalancerRan(); - boolean getBalancerRan(); - } - public static final class BalanceResponse extends - com.google.protobuf.GeneratedMessage - implements BalanceResponseOrBuilder { - // Use BalanceResponse.newBuilder() to construct. 
- private BalanceResponse(Builder builder) { - super(builder); - } - private BalanceResponse(boolean noInit) {} - - private static final BalanceResponse defaultInstance; - public static BalanceResponse getDefaultInstance() { - return defaultInstance; - } - - public BalanceResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool balancerRan = 1; - public static final int BALANCERRAN_FIELD_NUMBER = 1; - private boolean balancerRan_; - public boolean hasBalancerRan() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getBalancerRan() { - return balancerRan_; - } - - private void initFields() { - balancerRan_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasBalancerRan()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, balancerRan_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, balancerRan_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) obj; - - boolean result = true; - result = result && (hasBalancerRan() == other.hasBalancerRan()); - if (hasBalancerRan()) { - result = result && (getBalancerRan() - == other.getBalancerRan()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasBalancerRan()) { - hash = (37 * hash) + BALANCERRAN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBalancerRan()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponseOrBuilder 
{ - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - balancerRan_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.balancerRan_ = balancerRan_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()) return this; - if (other.hasBalancerRan()) { - setBalancerRan(other.getBalancerRan()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean 
isInitialized() { - if (!hasBalancerRan()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - balancerRan_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool balancerRan = 1; - private boolean balancerRan_ ; - public boolean hasBalancerRan() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getBalancerRan() { - return balancerRan_; - } - public Builder setBalancerRan(boolean value) { - bitField0_ |= 0x00000001; - balancerRan_ = value; - onChanged(); - return this; - } - public Builder clearBalancerRan() { - bitField0_ = (bitField0_ & ~0x00000001); - balancerRan_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:BalanceResponse) - } - - static { - defaultInstance = new BalanceResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BalanceResponse) - } - - public interface SetBalancerRunningRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool on = 1; - boolean hasOn(); - boolean getOn(); - - // optional bool synchronous = 2; - boolean hasSynchronous(); - boolean getSynchronous(); - } - public static final class SetBalancerRunningRequest extends - com.google.protobuf.GeneratedMessage - implements SetBalancerRunningRequestOrBuilder { - // Use SetBalancerRunningRequest.newBuilder() to construct. 
- private SetBalancerRunningRequest(Builder builder) { - super(builder); - } - private SetBalancerRunningRequest(boolean noInit) {} - - private static final SetBalancerRunningRequest defaultInstance; - public static SetBalancerRunningRequest getDefaultInstance() { - return defaultInstance; - } - - public SetBalancerRunningRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bool on = 1; - public static final int ON_FIELD_NUMBER = 1; - private boolean on_; - public boolean hasOn() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getOn() { - return on_; - } - - // optional bool synchronous = 2; - public static final int SYNCHRONOUS_FIELD_NUMBER = 2; - private boolean synchronous_; - public boolean hasSynchronous() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getSynchronous() { - return synchronous_; - } - - private void initFields() { - on_ = false; - synchronous_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasOn()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, on_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, synchronous_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, on_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, synchronous_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) obj; - - boolean result = true; - result = result && (hasOn() == other.hasOn()); - if (hasOn()) { - result = result && (getOn() - == other.getOn()); - } - result = result && (hasSynchronous() == other.hasSynchronous()); - if (hasSynchronous()) { - result = result && 
(getSynchronous() - == other.getSynchronous()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasOn()) { - hash = (37 * hash) + ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOn()); - } - if (hasSynchronous()) { - hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSynchronous()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - on_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - synchronous_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = new 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.on_ = on_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.synchronous_ = synchronous_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance()) return this; - if (other.hasOn()) { - setOn(other.getOn()); - } - if (other.hasSynchronous()) { - setSynchronous(other.getSynchronous()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasOn()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - on_ = input.readBool(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - synchronous_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool on = 1; - private boolean on_ ; - public boolean hasOn() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getOn() { - return on_; - } - public Builder setOn(boolean value) { - bitField0_ |= 0x00000001; - on_ = value; - onChanged(); - return this; - } - public Builder clearOn() { - bitField0_ = (bitField0_ & ~0x00000001); - on_ = false; - onChanged(); - return this; - } - - // optional bool synchronous = 2; - private boolean synchronous_ ; - public boolean hasSynchronous() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getSynchronous() { - return synchronous_; - } - public Builder setSynchronous(boolean value) { - bitField0_ |= 0x00000002; - synchronous_ = value; - onChanged(); - return this; - } - public Builder clearSynchronous() { - bitField0_ = (bitField0_ & ~0x00000002); - synchronous_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SetBalancerRunningRequest) - } - - static { - defaultInstance = new SetBalancerRunningRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SetBalancerRunningRequest) - } - - public interface SetBalancerRunningResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bool 
prevBalanceValue = 1; - boolean hasPrevBalanceValue(); - boolean getPrevBalanceValue(); - } - public static final class SetBalancerRunningResponse extends - com.google.protobuf.GeneratedMessage - implements SetBalancerRunningResponseOrBuilder { - // Use SetBalancerRunningResponse.newBuilder() to construct. - private SetBalancerRunningResponse(Builder builder) { - super(builder); - } - private SetBalancerRunningResponse(boolean noInit) {} - - private static final SetBalancerRunningResponse defaultInstance; - public static SetBalancerRunningResponse getDefaultInstance() { - return defaultInstance; - } - - public SetBalancerRunningResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional bool prevBalanceValue = 1; - public static final int PREVBALANCEVALUE_FIELD_NUMBER = 1; - private boolean prevBalanceValue_; - public boolean hasPrevBalanceValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getPrevBalanceValue() { - return prevBalanceValue_; - } - - private void initFields() { - prevBalanceValue_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, prevBalanceValue_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, prevBalanceValue_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) obj; - - boolean result = true; - result = result && (hasPrevBalanceValue() == other.hasPrevBalanceValue()); - if (hasPrevBalanceValue()) { - result = result && (getPrevBalanceValue() - == other.getPrevBalanceValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + 
getDescriptorForType().hashCode(); - if (hasPrevBalanceValue()) { - hash = (37 * hash) + PREVBALANCEVALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevBalanceValue()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - prevBalanceValue_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.prevBalanceValue_ = prevBalanceValue_; - result.bitField0_ = to_bitField0_; - onBuilt(); - 
return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()) return this; - if (other.hasPrevBalanceValue()) { - setPrevBalanceValue(other.getPrevBalanceValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - prevBalanceValue_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional bool prevBalanceValue = 1; - private boolean prevBalanceValue_ ; - public boolean hasPrevBalanceValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getPrevBalanceValue() { - return prevBalanceValue_; - } - public Builder setPrevBalanceValue(boolean value) { - bitField0_ |= 0x00000001; - prevBalanceValue_ = value; - onChanged(); - return this; - } - public Builder clearPrevBalanceValue() { - bitField0_ = (bitField0_ & ~0x00000001); - prevBalanceValue_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SetBalancerRunningResponse) - } - - static { - defaultInstance = new SetBalancerRunningResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse) - } - - public interface CatalogScanRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class CatalogScanRequest extends - com.google.protobuf.GeneratedMessage - implements CatalogScanRequestOrBuilder { - // Use CatalogScanRequest.newBuilder() to construct. 
- private CatalogScanRequest(Builder builder) { - super(builder); - } - private CatalogScanRequest(boolean noInit) {} - - private static final CatalogScanRequest defaultInstance; - public static CatalogScanRequest getDefaultInstance() { - return defaultInstance; - } - - public CatalogScanRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - 
return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - 
} - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:CatalogScanRequest) - } - - static { - defaultInstance = new CatalogScanRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CatalogScanRequest) - } - - public interface CatalogScanResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional int32 scanResult = 1; - boolean hasScanResult(); - int getScanResult(); - } - public static final class CatalogScanResponse extends - com.google.protobuf.GeneratedMessage - implements 
CatalogScanResponseOrBuilder { - // Use CatalogScanResponse.newBuilder() to construct. - private CatalogScanResponse(Builder builder) { - super(builder); - } - private CatalogScanResponse(boolean noInit) {} - - private static final CatalogScanResponse defaultInstance; - public static CatalogScanResponse getDefaultInstance() { - return defaultInstance; - } - - public CatalogScanResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional int32 scanResult = 1; - public static final int SCANRESULT_FIELD_NUMBER = 1; - private int scanResult_; - public boolean hasScanResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getScanResult() { - return scanResult_; - } - - private void initFields() { - scanResult_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt32(1, scanResult_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(1, scanResult_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) obj; - - boolean result = true; - result = result && (hasScanResult() == other.hasScanResult()); - if (hasScanResult()) { - result = result && (getScanResult() - == other.getScanResult()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasScanResult()) { - hash = (37 * hash) + SCANRESULT_FIELD_NUMBER; - hash = (53 * hash) + getScanResult(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - scanResult_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.scanResult_ = scanResult_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()) return this; - if 
(other.hasScanResult()) { - setScanResult(other.getScanResult()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - scanResult_ = input.readInt32(); - break; - } - } - } - } - - private int bitField0_; - - // optional int32 scanResult = 1; - private int scanResult_ ; - public boolean hasScanResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getScanResult() { - return scanResult_; - } - public Builder setScanResult(int value) { - bitField0_ |= 0x00000001; - scanResult_ = value; - onChanged(); - return this; - } - public Builder clearScanResult() { - bitField0_ = (bitField0_ & ~0x00000001); - scanResult_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CatalogScanResponse) - } - - static { - defaultInstance = new CatalogScanResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CatalogScanResponse) - } - - public interface EnableCatalogJanitorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool enable = 1; - boolean hasEnable(); - boolean getEnable(); - } - public static final class EnableCatalogJanitorRequest extends - com.google.protobuf.GeneratedMessage - implements EnableCatalogJanitorRequestOrBuilder { - // Use EnableCatalogJanitorRequest.newBuilder() to construct. 
- private EnableCatalogJanitorRequest(Builder builder) { - super(builder); - } - private EnableCatalogJanitorRequest(boolean noInit) {} - - private static final EnableCatalogJanitorRequest defaultInstance; - public static EnableCatalogJanitorRequest getDefaultInstance() { - return defaultInstance; - } - - public EnableCatalogJanitorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bool enable = 1; - public static final int ENABLE_FIELD_NUMBER = 1; - private boolean enable_; - public boolean hasEnable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getEnable() { - return enable_; - } - - private void initFields() { - enable_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasEnable()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, enable_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, enable_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) obj; - - boolean result = true; - result = result && (hasEnable() == other.hasEnable()); - if (hasEnable()) { - result = result && (getEnable() - == other.getEnable()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEnable()) { - hash = (37 * hash) + ENABLE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnable()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - enable_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.enable_ = enable_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance()) return this; - if (other.hasEnable()) { - setEnable(other.getEnable()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasEnable()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - enable_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool enable = 1; - private boolean enable_ ; - public boolean hasEnable() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getEnable() { - return enable_; - } - public Builder setEnable(boolean value) { - bitField0_ |= 0x00000001; - enable_ = value; - onChanged(); - return this; - } - public Builder clearEnable() { - bitField0_ = (bitField0_ & ~0x00000001); - enable_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorRequest) - } - - static { - defaultInstance = new EnableCatalogJanitorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:EnableCatalogJanitorRequest) - } - - public interface EnableCatalogJanitorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bool prevValue = 1; - boolean hasPrevValue(); - boolean getPrevValue(); - } - public static final class EnableCatalogJanitorResponse extends - com.google.protobuf.GeneratedMessage - implements EnableCatalogJanitorResponseOrBuilder { - // Use EnableCatalogJanitorResponse.newBuilder() to construct. 
- private EnableCatalogJanitorResponse(Builder builder) { - super(builder); - } - private EnableCatalogJanitorResponse(boolean noInit) {} - - private static final EnableCatalogJanitorResponse defaultInstance; - public static EnableCatalogJanitorResponse getDefaultInstance() { - return defaultInstance; - } - - public EnableCatalogJanitorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional bool prevValue = 1; - public static final int PREVVALUE_FIELD_NUMBER = 1; - private boolean prevValue_; - public boolean hasPrevValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getPrevValue() { - return prevValue_; - } - - private void initFields() { - prevValue_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, prevValue_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, prevValue_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) obj; - - boolean result = true; - result = result && (hasPrevValue() == other.hasPrevValue()); - if (hasPrevValue()) { - result = result && (getPrevValue() - == other.getPrevValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPrevValue()) { - hash = (37 * hash) + PREVVALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevValue()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - prevValue_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.prevValue_ = prevValue_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()) return this; - if (other.hasPrevValue()) { - setPrevValue(other.getPrevValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - prevValue_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional bool prevValue = 1; - private boolean prevValue_ ; - public boolean hasPrevValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getPrevValue() { - return prevValue_; - } - public Builder setPrevValue(boolean value) { - bitField0_ |= 0x00000001; - prevValue_ = value; - onChanged(); - return this; - } - public Builder clearPrevValue() { - bitField0_ = (bitField0_ & ~0x00000001); - prevValue_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorResponse) - } - - static { - defaultInstance = new EnableCatalogJanitorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:EnableCatalogJanitorResponse) - } - - public interface IsCatalogJanitorEnabledRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class IsCatalogJanitorEnabledRequest extends - com.google.protobuf.GeneratedMessage - implements IsCatalogJanitorEnabledRequestOrBuilder { - // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct. 
- private IsCatalogJanitorEnabledRequest(Builder builder) { - super(builder); - } - private IsCatalogJanitorEnabledRequest(boolean noInit) {} - - private static final IsCatalogJanitorEnabledRequest defaultInstance; - public static IsCatalogJanitorEnabledRequest getDefaultInstance() { - return defaultInstance; - } - - public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; - } - - // Construct using 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledRequest) - } - - static { - defaultInstance = new IsCatalogJanitorEnabledRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledRequest) - } - - public interface IsCatalogJanitorEnabledResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool value = 1; - boolean hasValue(); - boolean getValue(); - } - public static final class IsCatalogJanitorEnabledResponse extends - com.google.protobuf.GeneratedMessage - implements IsCatalogJanitorEnabledResponseOrBuilder { - // Use IsCatalogJanitorEnabledResponse.newBuilder() to construct. - private IsCatalogJanitorEnabledResponse(Builder builder) { - super(builder); - } - private IsCatalogJanitorEnabledResponse(boolean noInit) {} - - private static final IsCatalogJanitorEnabledResponse defaultInstance; - public static IsCatalogJanitorEnabledResponse getDefaultInstance() { - return defaultInstance; - } - - public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private boolean value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getValue() { - return value_; - } - - private void initFields() { - value_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) obj; - - boolean result = true; - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && (getValue() - == other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getValue()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - value_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()) return this; - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - value_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool value = 1; - private boolean value_ ; - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getValue() { - return value_; - } - public Builder setValue(boolean value) { - bitField0_ |= 0x00000001; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000001); - value_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledResponse) - } - - static { - defaultInstance = new IsCatalogJanitorEnabledResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse) - } - - public static abstract class MasterAdminService - implements com.google.protobuf.Service { - protected MasterAdminService() {} - - public interface Interface { - public abstract void addColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void deleteColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void 
modifyColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void moveRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void assignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void unassignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void offlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void deleteTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void enableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void disableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void modifyTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void createTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void shutdown( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void stopMaster( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void balance( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void setBalancerRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void runCatalogScan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void enableCatalogJanitor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void isCatalogJanitorEnabled( - 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execMasterService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new MasterAdminService() { - @java.lang.Override - public void addColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, - com.google.protobuf.RpcCallback done) { - impl.addColumn(controller, request, done); - } - - @java.lang.Override - public void deleteColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, - com.google.protobuf.RpcCallback done) { - impl.deleteColumn(controller, request, done); - } - - @java.lang.Override - public void modifyColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, - com.google.protobuf.RpcCallback done) { - impl.modifyColumn(controller, request, done); - } - - @java.lang.Override - public void moveRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.moveRegion(controller, request, done); - } - - @java.lang.Override - public void assignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.assignRegion(controller, request, done); - } - - @java.lang.Override - public void unassignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.unassignRegion(controller, request, done); - } - - @java.lang.Override - public void offlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.offlineRegion(controller, request, done); - } - - @java.lang.Override - public void deleteTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, - com.google.protobuf.RpcCallback done) { - impl.deleteTable(controller, request, done); - } - - @java.lang.Override - public void enableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, - com.google.protobuf.RpcCallback done) { - impl.enableTable(controller, request, done); - } - - @java.lang.Override - public void disableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, - com.google.protobuf.RpcCallback done) { - impl.disableTable(controller, request, done); - } - - @java.lang.Override - public void modifyTable( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, - com.google.protobuf.RpcCallback done) { - impl.modifyTable(controller, request, done); - } - - @java.lang.Override - public void createTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, - com.google.protobuf.RpcCallback done) { - impl.createTable(controller, request, done); - } - - @java.lang.Override - public void shutdown( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, - com.google.protobuf.RpcCallback done) { - impl.shutdown(controller, request, done); - } - - @java.lang.Override - public void stopMaster( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, - com.google.protobuf.RpcCallback done) { - impl.stopMaster(controller, request, done); - } - - @java.lang.Override - public void balance( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, - com.google.protobuf.RpcCallback done) { - impl.balance(controller, request, done); - } - - @java.lang.Override - public void setBalancerRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, - com.google.protobuf.RpcCallback done) { - impl.setBalancerRunning(controller, request, done); - } - - @java.lang.Override - public void runCatalogScan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, - com.google.protobuf.RpcCallback done) { - impl.runCatalogScan(controller, request, done); - } - - @java.lang.Override - public void enableCatalogJanitor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, - com.google.protobuf.RpcCallback done) { - impl.enableCatalogJanitor(controller, request, done); - } - - @java.lang.Override - public void isCatalogJanitorEnabled( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, - com.google.protobuf.RpcCallback done) { - impl.isCatalogJanitorEnabled(controller, request, done); - } - - @java.lang.Override - public void execMasterService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done) { - impl.execMasterService(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service 
type."); - } - switch(method.getIndex()) { - case 0: - return impl.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)request); - case 1: - return impl.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)request); - case 2: - return impl.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)request); - case 3: - return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)request); - case 4: - return impl.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)request); - case 5: - return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)request); - case 6: - return impl.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)request); - case 7: - return impl.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)request); - case 8: - return impl.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)request); - case 9: - return impl.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)request); - case 10: - return impl.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)request); - case 11: - return impl.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)request); - case 12: - return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)request); - case 13: - return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)request); - case 14: - return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)request); - case 15: - return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request); - case 16: - return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request); - case 17: - return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request); - case 18: - return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)request); - case 19: - return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); - case 1: - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); - case 12: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); - case 13: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); - case 14: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); - case 15: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); - case 16: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); - case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); - case 18: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); - case 19: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); - case 6: - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); - case 12: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); - case 13: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); - case 14: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); - case 15: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); - case 16: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); - case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); - case 18: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); - case 19: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void addColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void deleteColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void modifyColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void moveRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void assignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void unassignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void offlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void deleteTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, - 
com.google.protobuf.RpcCallback done); - - public abstract void enableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void disableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void modifyTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void createTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void shutdown( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void stopMaster( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void balance( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void setBalancerRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void runCatalogScan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void enableCatalogJanitor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void isCatalogJanitorEnabled( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execMasterService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.addColumn(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 5: - this.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 7: - this.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 8: - this.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 9: - this.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 10: - this.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 11: - this.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 12: - this.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 13: - this.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 14: - this.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 15: - this.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 16: - this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 17: - this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request, - 
com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 18: - this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 19: - this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); - case 12: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); - case 13: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); - case 14: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); - case 15: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); - case 16: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); - case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); - case 18: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); - case 19: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - 
com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); - case 12: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); - case 13: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); - case 14: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); - case 15: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); - case 16: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); - case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); - case 18: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); - case 19: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MasterAdminService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void addColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - 
getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance())); - } - - public void deleteColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance())); - } - - public void modifyColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance())); - } - - public void moveRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance())); - } - - public void assignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance())); - } - - public void unassignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance())); - } - - public void offlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance())); - } - - public void deleteTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance())); - } - - public void enableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance())); - } - - public void disableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(9), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance())); - } - - public void modifyTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(10), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance())); - } - - public void createTable( - 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(11), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance())); - } - - public void shutdown( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(12), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance())); - } - - public void stopMaster( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(13), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance())); - } - - public void balance( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(14), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance())); - } - - public void setBalancerRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(15), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance())); - } - - public void runCatalogScan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(16), - 
controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance())); - } - - public void enableCatalogJanitor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(17), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance())); - } - - public void isCatalogJanitorEnabled( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(18), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance())); - } - - public void execMasterService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(19), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) - throws com.google.protobuf.ServiceException; - - 
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) - throws com.google.protobuf.ServiceException; - - public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()); - } - - 
- public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()); - } - - - 
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(9), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(10), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(11), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(12), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(13), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(14), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()); - } - - - public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(15), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(16), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(17), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(18), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(19), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AddColumnRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AddColumnRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AddColumnResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_AddColumnResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DeleteColumnRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DeleteColumnRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DeleteColumnResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DeleteColumnResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ModifyColumnRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ModifyColumnRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ModifyColumnResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ModifyColumnResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MoveRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MoveRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MoveRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MoveRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AssignRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AssignRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AssignRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AssignRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UnassignRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UnassignRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UnassignRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UnassignRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OfflineRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OfflineRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OfflineRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OfflineRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CreateTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CreateTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CreateTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CreateTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - 
internal_static_DeleteTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DeleteTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DeleteTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DeleteTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_EnableTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_EnableTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_EnableTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_EnableTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DisableTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DisableTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DisableTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DisableTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ModifyTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ModifyTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ModifyTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ModifyTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ShutdownRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ShutdownRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ShutdownResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ShutdownResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_StopMasterRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_StopMasterRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_StopMasterResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_StopMasterResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BalanceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BalanceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BalanceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BalanceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SetBalancerRunningRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SetBalancerRunningRequest_fieldAccessorTable; - 
private static com.google.protobuf.Descriptors.Descriptor - internal_static_SetBalancerRunningResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SetBalancerRunningResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CatalogScanRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CatalogScanRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CatalogScanResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CatalogScanResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_EnableCatalogJanitorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_EnableCatalogJanitorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_IsCatalogJanitorEnabledRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_IsCatalogJanitorEnabledResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\021MasterAdmin.proto\032\013hbase.proto\032\014Client" + - ".proto\"R\n\020AddColumnRequest\022\021\n\ttableName\030" + - "\001 \002(\014\022+\n\016columnFamilies\030\002 \002(\0132\023.ColumnFa" + - "milySchema\"\023\n\021AddColumnResponse\"<\n\023Delet" + - "eColumnRequest\022\021\n\ttableName\030\001 \002(\014\022\022\n\ncol" + - "umnName\030\002 \002(\014\"\026\n\024DeleteColumnResponse\"U\n" + - "\023ModifyColumnRequest\022\021\n\ttableName\030\001 \002(\014\022" + - "+\n\016columnFamilies\030\002 \002(\0132\023.ColumnFamilySc" + - "hema\"\026\n\024ModifyColumnResponse\"Z\n\021MoveRegi" + - "onRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif", - "ier\022#\n\016destServerName\030\002 \001(\0132\013.ServerName" + - "\"\024\n\022MoveRegionResponse\"7\n\023AssignRegionRe" + - "quest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\"" + - "\026\n\024AssignRegionResponse\"O\n\025UnassignRegio" + - "nRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + - "er\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026UnassignRegi" + - "onResponse\"8\n\024OfflineRegionRequest\022 \n\006re" + - "gion\030\001 \002(\0132\020.RegionSpecifier\"\027\n\025OfflineR" + - "egionResponse\"J\n\022CreateTableRequest\022!\n\013t" + - "ableSchema\030\001 \002(\0132\014.TableSchema\022\021\n\tsplitK", - "eys\030\002 \003(\014\"\025\n\023CreateTableResponse\"\'\n\022Dele" + - "teTableRequest\022\021\n\ttableName\030\001 \002(\014\"\025\n\023Del" + - 
"eteTableResponse\"\'\n\022EnableTableRequest\022\021" + - "\n\ttableName\030\001 \002(\014\"\025\n\023EnableTableResponse" + - "\"(\n\023DisableTableRequest\022\021\n\ttableName\030\001 \002" + - "(\014\"\026\n\024DisableTableResponse\"J\n\022ModifyTabl" + - "eRequest\022\021\n\ttableName\030\001 \002(\014\022!\n\013tableSche" + - "ma\030\002 \002(\0132\014.TableSchema\"\025\n\023ModifyTableRes" + - "ponse\"\021\n\017ShutdownRequest\"\022\n\020ShutdownResp" + - "onse\"\023\n\021StopMasterRequest\"\024\n\022StopMasterR", - "esponse\"\020\n\016BalanceRequest\"&\n\017BalanceResp" + - "onse\022\023\n\013balancerRan\030\001 \002(\010\"<\n\031SetBalancer" + - "RunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronou" + - "s\030\002 \001(\010\"6\n\032SetBalancerRunningResponse\022\030\n" + - "\020prevBalanceValue\030\001 \001(\010\"\024\n\022CatalogScanRe" + - "quest\")\n\023CatalogScanResponse\022\022\n\nscanResu" + - "lt\030\001 \001(\005\"-\n\033EnableCatalogJanitorRequest\022" + - "\016\n\006enable\030\001 \002(\010\"1\n\034EnableCatalogJanitorR" + - "esponse\022\021\n\tprevValue\030\001 \001(\010\" \n\036IsCatalogJ" + - "anitorEnabledRequest\"0\n\037IsCatalogJanitor", - "EnabledResponse\022\r\n\005value\030\001 \002(\0102\201\n\n\022Maste" + - "rAdminService\0222\n\taddColumn\022\021.AddColumnRe" + - "quest\032\022.AddColumnResponse\022;\n\014deleteColum" + - "n\022\024.DeleteColumnRequest\032\025.DeleteColumnRe" + - "sponse\022;\n\014modifyColumn\022\024.ModifyColumnReq" + - "uest\032\025.ModifyColumnResponse\0225\n\nmoveRegio" + - "n\022\022.MoveRegionRequest\032\023.MoveRegionRespon" + - "se\022;\n\014assignRegion\022\024.AssignRegionRequest" + - "\032\025.AssignRegionResponse\022A\n\016unassignRegio" + - "n\022\026.UnassignRegionRequest\032\027.UnassignRegi", - "onResponse\022>\n\rofflineRegion\022\025.OfflineReg" + - "ionRequest\032\026.OfflineRegionResponse\0228\n\013de" + - "leteTable\022\023.DeleteTableRequest\032\024.DeleteT" + - "ableResponse\0228\n\013enableTable\022\023.EnableTabl" + - "eRequest\032\024.EnableTableResponse\022;\n\014disabl" + - "eTable\022\024.DisableTableRequest\032\025.DisableTa" + - "bleResponse\0228\n\013modifyTable\022\023.ModifyTable" + - "Request\032\024.ModifyTableResponse\0228\n\013createT" + - "able\022\023.CreateTableRequest\032\024.CreateTableR" + - "esponse\022/\n\010shutdown\022\020.ShutdownRequest\032\021.", - "ShutdownResponse\0225\n\nstopMaster\022\022.StopMas" + - "terRequest\032\023.StopMasterResponse\022,\n\007balan" + - "ce\022\017.BalanceRequest\032\020.BalanceResponse\022M\n" + - "\022setBalancerRunning\022\032.SetBalancerRunning" + - "Request\032\033.SetBalancerRunningResponse\022;\n\016" + - "runCatalogScan\022\023.CatalogScanRequest\032\024.Ca" + - "talogScanResponse\022S\n\024enableCatalogJanito" + - "r\022\034.EnableCatalogJanitorRequest\032\035.Enable" + - "CatalogJanitorResponse\022\\\n\027isCatalogJanit" + - "orEnabled\022\037.IsCatalogJanitorEnabledReque", - "st\032 .IsCatalogJanitorEnabledResponse\022L\n\021" + - "execMasterService\022\032.CoprocessorServiceRe" + - "quest\032\033.CoprocessorServiceResponseBG\n*or" + - "g.apache.hadoop.hbase.protobuf.generated" + - "B\021MasterAdminProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - 
descriptor = root; - internal_static_AddColumnRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_AddColumnRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AddColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnFamilies", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.Builder.class); - internal_static_AddColumnResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_AddColumnResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AddColumnResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.Builder.class); - internal_static_DeleteColumnRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_DeleteColumnRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DeleteColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.Builder.class); - internal_static_DeleteColumnResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_DeleteColumnResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DeleteColumnResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.Builder.class); - internal_static_ModifyColumnRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_ModifyColumnRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ModifyColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnFamilies", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.Builder.class); - internal_static_ModifyColumnResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_ModifyColumnResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ModifyColumnResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.Builder.class); - internal_static_MoveRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_MoveRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MoveRegionRequest_descriptor, - new java.lang.String[] { "Region", "DestServerName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.Builder.class); - 
internal_static_MoveRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_MoveRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MoveRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.Builder.class); - internal_static_AssignRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_AssignRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AssignRegionRequest_descriptor, - new java.lang.String[] { "Region", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.Builder.class); - internal_static_AssignRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_AssignRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AssignRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.Builder.class); - internal_static_UnassignRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_UnassignRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UnassignRegionRequest_descriptor, - new java.lang.String[] { "Region", "Force", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.Builder.class); - internal_static_UnassignRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_UnassignRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UnassignRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.Builder.class); - internal_static_OfflineRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_OfflineRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OfflineRegionRequest_descriptor, - new java.lang.String[] { "Region", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.Builder.class); - internal_static_OfflineRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_OfflineRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OfflineRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.Builder.class); - internal_static_CreateTableRequest_descriptor = - 
getDescriptor().getMessageTypes().get(14); - internal_static_CreateTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CreateTableRequest_descriptor, - new java.lang.String[] { "TableSchema", "SplitKeys", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.Builder.class); - internal_static_CreateTableResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_CreateTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CreateTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.Builder.class); - internal_static_DeleteTableRequest_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_DeleteTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DeleteTableRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.Builder.class); - internal_static_DeleteTableResponse_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_DeleteTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DeleteTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.Builder.class); - internal_static_EnableTableRequest_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_EnableTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EnableTableRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.Builder.class); - internal_static_EnableTableResponse_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_EnableTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EnableTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.Builder.class); - internal_static_DisableTableRequest_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_DisableTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DisableTableRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.Builder.class); - internal_static_DisableTableResponse_descriptor = - getDescriptor().getMessageTypes().get(21); - 
internal_static_DisableTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DisableTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.Builder.class); - internal_static_ModifyTableRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_ModifyTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ModifyTableRequest_descriptor, - new java.lang.String[] { "TableName", "TableSchema", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.Builder.class); - internal_static_ModifyTableResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_ModifyTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ModifyTableResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.Builder.class); - internal_static_ShutdownRequest_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_ShutdownRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ShutdownRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.Builder.class); - internal_static_ShutdownResponse_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_ShutdownResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ShutdownResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.Builder.class); - internal_static_StopMasterRequest_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_StopMasterRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_StopMasterRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.Builder.class); - internal_static_StopMasterResponse_descriptor = - getDescriptor().getMessageTypes().get(27); - internal_static_StopMasterResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_StopMasterResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.Builder.class); - internal_static_BalanceRequest_descriptor = - getDescriptor().getMessageTypes().get(28); - internal_static_BalanceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BalanceRequest_descriptor, - new 
java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.Builder.class); - internal_static_BalanceResponse_descriptor = - getDescriptor().getMessageTypes().get(29); - internal_static_BalanceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BalanceResponse_descriptor, - new java.lang.String[] { "BalancerRan", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.Builder.class); - internal_static_SetBalancerRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(30); - internal_static_SetBalancerRunningRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SetBalancerRunningRequest_descriptor, - new java.lang.String[] { "On", "Synchronous", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.Builder.class); - internal_static_SetBalancerRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(31); - internal_static_SetBalancerRunningResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SetBalancerRunningResponse_descriptor, - new java.lang.String[] { "PrevBalanceValue", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.Builder.class); - internal_static_CatalogScanRequest_descriptor = - getDescriptor().getMessageTypes().get(32); - internal_static_CatalogScanRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CatalogScanRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); - internal_static_CatalogScanResponse_descriptor = - getDescriptor().getMessageTypes().get(33); - internal_static_CatalogScanResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CatalogScanResponse_descriptor, - new java.lang.String[] { "ScanResult", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); - internal_static_EnableCatalogJanitorRequest_descriptor = - getDescriptor().getMessageTypes().get(34); - internal_static_EnableCatalogJanitorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EnableCatalogJanitorRequest_descriptor, - new java.lang.String[] { "Enable", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.Builder.class); - internal_static_EnableCatalogJanitorResponse_descriptor = - getDescriptor().getMessageTypes().get(35); - internal_static_EnableCatalogJanitorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_EnableCatalogJanitorResponse_descriptor, - new java.lang.String[] { "PrevValue", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.Builder.class); - internal_static_IsCatalogJanitorEnabledRequest_descriptor = - getDescriptor().getMessageTypes().get(36); - internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_IsCatalogJanitorEnabledRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.Builder.class); - internal_static_IsCatalogJanitorEnabledResponse_descriptor = - getDescriptor().getMessageTypes().get(37); - internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_IsCatalogJanitorEnabledResponse_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java deleted file mode 100644 index 1f36d15..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java +++ /dev/null @@ -1,3090 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: MasterMonitor.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class MasterMonitorProtos { - private MasterMonitorProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface GetSchemaAlterStatusRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - } - public static final class GetSchemaAlterStatusRequest extends - com.google.protobuf.GeneratedMessage - implements GetSchemaAlterStatusRequestOrBuilder { - // Use GetSchemaAlterStatusRequest.newBuilder() to construct. 
- private GetSchemaAlterStatusRequest(Builder builder) { - super(builder); - } - private GetSchemaAlterStatusRequest(boolean noInit) {} - - private static final GetSchemaAlterStatusRequest defaultInstance; - public static GetSchemaAlterStatusRequest getDefaultInstance() { - return defaultInstance; - } - - public GetSchemaAlterStatusRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest) - } - - static { - defaultInstance = new GetSchemaAlterStatusRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest) - } - - public interface GetSchemaAlterStatusResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional uint32 yetToUpdateRegions = 1; - boolean hasYetToUpdateRegions(); - int getYetToUpdateRegions(); - - // optional uint32 totalRegions = 2; - boolean hasTotalRegions(); - int getTotalRegions(); - } - public static final class GetSchemaAlterStatusResponse extends - com.google.protobuf.GeneratedMessage - implements GetSchemaAlterStatusResponseOrBuilder { - // Use GetSchemaAlterStatusResponse.newBuilder() to construct. 
- private GetSchemaAlterStatusResponse(Builder builder) { - super(builder); - } - private GetSchemaAlterStatusResponse(boolean noInit) {} - - private static final GetSchemaAlterStatusResponse defaultInstance; - public static GetSchemaAlterStatusResponse getDefaultInstance() { - return defaultInstance; - } - - public GetSchemaAlterStatusResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional uint32 yetToUpdateRegions = 1; - public static final int YETTOUPDATEREGIONS_FIELD_NUMBER = 1; - private int yetToUpdateRegions_; - public boolean hasYetToUpdateRegions() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getYetToUpdateRegions() { - return yetToUpdateRegions_; - } - - // optional uint32 totalRegions = 2; - public static final int TOTALREGIONS_FIELD_NUMBER = 2; - private int totalRegions_; - public boolean hasTotalRegions() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTotalRegions() { - return totalRegions_; - } - - private void initFields() { - yetToUpdateRegions_ = 0; - totalRegions_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, yetToUpdateRegions_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, totalRegions_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, yetToUpdateRegions_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, totalRegions_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) obj; - - boolean result = true; - result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions()); - if (hasYetToUpdateRegions()) { - result = 
result && (getYetToUpdateRegions() - == other.getYetToUpdateRegions()); - } - result = result && (hasTotalRegions() == other.hasTotalRegions()); - if (hasTotalRegions()) { - result = result && (getTotalRegions() - == other.getTotalRegions()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasYetToUpdateRegions()) { - hash = (37 * hash) + YETTOUPDATEREGIONS_FIELD_NUMBER; - hash = (53 * hash) + getYetToUpdateRegions(); - } - if (hasTotalRegions()) { - hash = (37 * hash) + TOTALREGIONS_FIELD_NUMBER; - hash = (53 * hash) + getTotalRegions(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - yetToUpdateRegions_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - totalRegions_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.yetToUpdateRegions_ = yetToUpdateRegions_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.totalRegions_ = totalRegions_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this; - if (other.hasYetToUpdateRegions()) { - setYetToUpdateRegions(other.getYetToUpdateRegions()); - } - if (other.hasTotalRegions()) { - setTotalRegions(other.getTotalRegions()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - yetToUpdateRegions_ = input.readUInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - totalRegions_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // optional uint32 yetToUpdateRegions = 1; - private int yetToUpdateRegions_ ; - public boolean hasYetToUpdateRegions() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getYetToUpdateRegions() { - return yetToUpdateRegions_; - } - public Builder setYetToUpdateRegions(int value) { - bitField0_ |= 0x00000001; - yetToUpdateRegions_ = value; - onChanged(); - return this; - } - public Builder clearYetToUpdateRegions() { - bitField0_ = (bitField0_ & ~0x00000001); - yetToUpdateRegions_ = 0; - onChanged(); - return this; - } - - // optional uint32 totalRegions = 2; - private int totalRegions_ ; - public boolean hasTotalRegions() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTotalRegions() { - return totalRegions_; - } - public Builder setTotalRegions(int value) { - bitField0_ |= 0x00000002; - totalRegions_ = value; - onChanged(); - 
return this; - } - public Builder clearTotalRegions() { - bitField0_ = (bitField0_ & ~0x00000002); - totalRegions_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse) - } - - static { - defaultInstance = new GetSchemaAlterStatusResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse) - } - - public interface GetTableDescriptorsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated string tableNames = 1; - java.util.List getTableNamesList(); - int getTableNamesCount(); - String getTableNames(int index); - } - public static final class GetTableDescriptorsRequest extends - com.google.protobuf.GeneratedMessage - implements GetTableDescriptorsRequestOrBuilder { - // Use GetTableDescriptorsRequest.newBuilder() to construct. - private GetTableDescriptorsRequest(Builder builder) { - super(builder); - } - private GetTableDescriptorsRequest(boolean noInit) {} - - private static final GetTableDescriptorsRequest defaultInstance; - public static GetTableDescriptorsRequest getDefaultInstance() { - return defaultInstance; - } - - public GetTableDescriptorsRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; - } - - // repeated string tableNames = 1; - public static final int TABLENAMES_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList tableNames_; - public java.util.List - getTableNamesList() { - return tableNames_; - } - public int getTableNamesCount() { - return tableNames_.size(); - } - public String getTableNames(int index) { - return tableNames_.get(index); - } - - private void initFields() { - tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < tableNames_.size(); i++) { - output.writeBytes(1, tableNames_.getByteString(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < tableNames_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(tableNames_.getByteString(i)); - } - size += dataSize; - size += 1 * getTableNamesList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - 
} - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) obj; - - boolean result = true; - result = result && getTableNamesList() - .equals(other.getTableNamesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getTableNamesCount() > 0) { - hash = (37 * hash) + TABLENAMES_FIELD_NUMBER; - hash = (53 * hash) + getTableNamesList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = buildPartial(); - if (!result.isInitialized()) { - 
throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - tableNames_ = new com.google.protobuf.UnmodifiableLazyStringList( - tableNames_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.tableNames_ = tableNames_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this; - if (!other.tableNames_.isEmpty()) { - if (tableNames_.isEmpty()) { - tableNames_ = other.tableNames_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureTableNamesIsMutable(); - tableNames_.addAll(other.tableNames_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureTableNamesIsMutable(); - tableNames_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated string tableNames = 1; - private com.google.protobuf.LazyStringList tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureTableNamesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - tableNames_ = new com.google.protobuf.LazyStringArrayList(tableNames_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getTableNamesList() { - return java.util.Collections.unmodifiableList(tableNames_); - } - public int getTableNamesCount() { - return tableNames_.size(); - } - public String getTableNames(int index) { - return tableNames_.get(index); - } - public Builder setTableNames( - int index, String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureTableNamesIsMutable(); - tableNames_.set(index, value); - onChanged(); - return this; - } - public Builder addTableNames(String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureTableNamesIsMutable(); - tableNames_.add(value); - onChanged(); - return this; - } - 
public Builder addAllTableNames( - java.lang.Iterable values) { - ensureTableNamesIsMutable(); - super.addAll(values, tableNames_); - onChanged(); - return this; - } - public Builder clearTableNames() { - tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - void addTableNames(com.google.protobuf.ByteString value) { - ensureTableNamesIsMutable(); - tableNames_.add(value); - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest) - } - - static { - defaultInstance = new GetTableDescriptorsRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest) - } - - public interface GetTableDescriptorsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .TableSchema tableSchema = 1; - java.util.List - getTableSchemaList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index); - int getTableSchemaCount(); - java.util.List - getTableSchemaOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( - int index); - } - public static final class GetTableDescriptorsResponse extends - com.google.protobuf.GeneratedMessage - implements GetTableDescriptorsResponseOrBuilder { - // Use GetTableDescriptorsResponse.newBuilder() to construct. - private GetTableDescriptorsResponse(Builder builder) { - super(builder); - } - private GetTableDescriptorsResponse(boolean noInit) {} - - private static final GetTableDescriptorsResponse defaultInstance; - public static GetTableDescriptorsResponse getDefaultInstance() { - return defaultInstance; - } - - public GetTableDescriptorsResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; - } - - // repeated .TableSchema tableSchema = 1; - public static final int TABLESCHEMA_FIELD_NUMBER = 1; - private java.util.List tableSchema_; - public java.util.List getTableSchemaList() { - return tableSchema_; - } - public java.util.List - getTableSchemaOrBuilderList() { - return tableSchema_; - } - public int getTableSchemaCount() { - return tableSchema_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { - return tableSchema_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( - int index) { - return tableSchema_.get(index); - } - - private void initFields() { - tableSchema_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getTableSchemaCount(); i++) { - if (!getTableSchema(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws 
java.io.IOException { - getSerializedSize(); - for (int i = 0; i < tableSchema_.size(); i++) { - output.writeMessage(1, tableSchema_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < tableSchema_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableSchema_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) obj; - - boolean result = true; - result = result && getTableSchemaList() - .equals(other.getTableSchemaList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getTableSchemaCount() > 0) { - hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; - hash = (53 * hash) + getTableSchemaList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, 
extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTableSchemaFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (tableSchemaBuilder_ == null) { - tableSchema_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - tableSchemaBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse(this); - int from_bitField0_ = bitField0_; - if (tableSchemaBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.tableSchema_ = tableSchema_; - } else { - result.tableSchema_ = tableSchemaBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this; - if (tableSchemaBuilder_ == null) { - if (!other.tableSchema_.isEmpty()) { - if (tableSchema_.isEmpty()) { - tableSchema_ = other.tableSchema_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureTableSchemaIsMutable(); - tableSchema_.addAll(other.tableSchema_); - } - onChanged(); - } - } else { - if (!other.tableSchema_.isEmpty()) { - if (tableSchemaBuilder_.isEmpty()) { - tableSchemaBuilder_.dispose(); - tableSchemaBuilder_ = null; - tableSchema_ = other.tableSchema_; - bitField0_ = (bitField0_ & ~0x00000001); - tableSchemaBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getTableSchemaFieldBuilder() : null; - } else { - tableSchemaBuilder_.addAllMessages(other.tableSchema_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getTableSchemaCount(); i++) { - if (!getTableSchema(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addTableSchema(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .TableSchema tableSchema = 1; - private java.util.List tableSchema_ = - java.util.Collections.emptyList(); - private void ensureTableSchemaIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - tableSchema_ = new java.util.ArrayList(tableSchema_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; - - public java.util.List getTableSchemaList() { - if (tableSchemaBuilder_ == null) { - return java.util.Collections.unmodifiableList(tableSchema_); - } else { - return tableSchemaBuilder_.getMessageList(); - } - } - public int getTableSchemaCount() { - if (tableSchemaBuilder_ == null) { - return tableSchema_.size(); - } else { - return tableSchemaBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { - if (tableSchemaBuilder_ == null) { - return tableSchema_.get(index); - } else { - return tableSchemaBuilder_.getMessage(index); - } - } - public Builder setTableSchema( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureTableSchemaIsMutable(); - tableSchema_.set(index, value); - onChanged(); - } else { - tableSchemaBuilder_.setMessage(index, value); - } - return this; - } - public Builder setTableSchema( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { - if (tableSchemaBuilder_ == null) { - ensureTableSchemaIsMutable(); - tableSchema_.set(index, builderForValue.build()); - onChanged(); - } else { - tableSchemaBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - 
} - ensureTableSchemaIsMutable(); - tableSchema_.add(value); - onChanged(); - } else { - tableSchemaBuilder_.addMessage(value); - } - return this; - } - public Builder addTableSchema( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { - if (tableSchemaBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureTableSchemaIsMutable(); - tableSchema_.add(index, value); - onChanged(); - } else { - tableSchemaBuilder_.addMessage(index, value); - } - return this; - } - public Builder addTableSchema( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { - if (tableSchemaBuilder_ == null) { - ensureTableSchemaIsMutable(); - tableSchema_.add(builderForValue.build()); - onChanged(); - } else { - tableSchemaBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addTableSchema( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { - if (tableSchemaBuilder_ == null) { - ensureTableSchemaIsMutable(); - tableSchema_.add(index, builderForValue.build()); - onChanged(); - } else { - tableSchemaBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllTableSchema( - java.lang.Iterable values) { - if (tableSchemaBuilder_ == null) { - ensureTableSchemaIsMutable(); - super.addAll(values, tableSchema_); - onChanged(); - } else { - tableSchemaBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearTableSchema() { - if (tableSchemaBuilder_ == null) { - tableSchema_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - tableSchemaBuilder_.clear(); - } - return this; - } - public Builder removeTableSchema(int index) { - if (tableSchemaBuilder_ == null) { - ensureTableSchemaIsMutable(); - tableSchema_.remove(index); - onChanged(); - } else { - tableSchemaBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder( - int index) { - return getTableSchemaFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( - int index) { - if (tableSchemaBuilder_ == null) { - return tableSchema_.get(index); } else { - return tableSchemaBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getTableSchemaOrBuilderList() { - if (tableSchemaBuilder_ != null) { - return tableSchemaBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(tableSchema_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() { - return getTableSchemaFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder( - int index) { - return getTableSchemaFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); - } - public java.util.List - getTableSchemaBuilderList() { - return getTableSchemaFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> - getTableSchemaFieldBuilder() { - if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - tableSchema_ = null; - } - return tableSchemaBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetTableDescriptorsResponse) - } - - static { - defaultInstance = new GetTableDescriptorsResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetTableDescriptorsResponse) - } - - public interface GetClusterStatusRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class GetClusterStatusRequest extends - com.google.protobuf.GeneratedMessage - implements GetClusterStatusRequestOrBuilder { - // Use GetClusterStatusRequest.newBuilder() to construct. - private GetClusterStatusRequest(Builder builder) { - super(builder); - } - private GetClusterStatusRequest(boolean noInit) {} - - private static final GetClusterStatusRequest defaultInstance; - public static GetClusterStatusRequest getDefaultInstance() { - return defaultInstance; - } - - public GetClusterStatusRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash 
= 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { 
return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:GetClusterStatusRequest) - } - - static { - defaultInstance = new GetClusterStatusRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetClusterStatusRequest) - } - - public interface GetClusterStatusResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ClusterStatus clusterStatus = 1; - boolean hasClusterStatus(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus(); - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder(); - } - public static final class GetClusterStatusResponse extends - com.google.protobuf.GeneratedMessage - implements GetClusterStatusResponseOrBuilder { - // Use GetClusterStatusResponse.newBuilder() to construct. 
- private GetClusterStatusResponse(Builder builder) { - super(builder); - } - private GetClusterStatusResponse(boolean noInit) {} - - private static final GetClusterStatusResponse defaultInstance; - public static GetClusterStatusResponse getDefaultInstance() { - return defaultInstance; - } - - public GetClusterStatusResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable; - } - - private int bitField0_; - // required .ClusterStatus clusterStatus = 1; - public static final int CLUSTERSTATUS_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_; - public boolean hasClusterStatus() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { - return clusterStatus_; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { - return clusterStatus_; - } - - private void initFields() { - clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasClusterStatus()) { - memoizedIsInitialized = 0; - return false; - } - if (!getClusterStatus().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, clusterStatus_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, clusterStatus_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) obj; - - boolean result = true; - result = result && (hasClusterStatus() == other.hasClusterStatus()); - if (hasClusterStatus()) { - result = result && getClusterStatus() - .equals(other.getClusterStatus()); 
- } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasClusterStatus()) { - hash = (37 * hash) + CLUSTERSTATUS_FIELD_NUMBER; - hash = (53 * hash) + getClusterStatus().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() 
{ return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getClusterStatusFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (clusterStatusBuilder_ == null) { - clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - } else { - clusterStatusBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse result = new 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (clusterStatusBuilder_ == null) { - result.clusterStatus_ = clusterStatus_; - } else { - result.clusterStatus_ = clusterStatusBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance()) return this; - if (other.hasClusterStatus()) { - mergeClusterStatus(other.getClusterStatus()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasClusterStatus()) { - - return false; - } - if (!getClusterStatus().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(); - if (hasClusterStatus()) { - subBuilder.mergeFrom(getClusterStatus()); - } - input.readMessage(subBuilder, extensionRegistry); - setClusterStatus(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ClusterStatus clusterStatus = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> clusterStatusBuilder_; - public boolean hasClusterStatus() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { - if (clusterStatusBuilder_ == null) { - return clusterStatus_; - } else { - return clusterStatusBuilder_.getMessage(); - } - } - public Builder setClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { - if 
(clusterStatusBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - clusterStatus_ = value; - onChanged(); - } else { - clusterStatusBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setClusterStatus( - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder builderForValue) { - if (clusterStatusBuilder_ == null) { - clusterStatus_ = builderForValue.build(); - onChanged(); - } else { - clusterStatusBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { - if (clusterStatusBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - clusterStatus_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) { - clusterStatus_ = - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(clusterStatus_).mergeFrom(value).buildPartial(); - } else { - clusterStatus_ = value; - } - onChanged(); - } else { - clusterStatusBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearClusterStatus() { - if (clusterStatusBuilder_ == null) { - clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - onChanged(); - } else { - clusterStatusBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder getClusterStatusBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getClusterStatusFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { - if (clusterStatusBuilder_ != null) { - return clusterStatusBuilder_.getMessageOrBuilder(); - } else { - return clusterStatus_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> - getClusterStatusFieldBuilder() { - if (clusterStatusBuilder_ == null) { - clusterStatusBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder>( - clusterStatus_, - getParentForChildren(), - isClean()); - clusterStatus_ = null; - } - return clusterStatusBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetClusterStatusResponse) - } - - static { - defaultInstance = new GetClusterStatusResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetClusterStatusResponse) - } - - public static abstract class MasterMonitorService - implements com.google.protobuf.Service { - protected MasterMonitorService() {} - - public interface Interface { - public abstract void getSchemaAlterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, - com.google.protobuf.RpcCallback done); - - 
public abstract void getTableDescriptors( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getClusterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new MasterMonitorService() { - @java.lang.Override - public void getSchemaAlterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, - com.google.protobuf.RpcCallback done) { - impl.getSchemaAlterStatus(controller, request, done); - } - - @java.lang.Override - public void getTableDescriptors( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, - com.google.protobuf.RpcCallback done) { - impl.getTableDescriptors(controller, request, done); - } - - @java.lang.Override - public void getClusterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, - com.google.protobuf.RpcCallback done) { - impl.getClusterStatus(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)request); - case 1: - return impl.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)request); - case 2: - return impl.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); - case 2: - return 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void getSchemaAlterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getTableDescriptors( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getClusterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong 
service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.MasterMonitorService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getSchemaAlterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance())); - } - - public void getTableDescriptors( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance())); - } - - public void getClusterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getTableDescriptors( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getClusterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse getTableDescriptors( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse getClusterStatus( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - 
org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetSchemaAlterStatusRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetSchemaAlterStatusResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetTableDescriptorsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetTableDescriptorsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetTableDescriptorsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetTableDescriptorsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetClusterStatusRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetClusterStatusRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetClusterStatusResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetClusterStatusResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\023MasterMonitor.proto\032\013hbase.proto\032\023Clus" + - "terStatus.proto\"0\n\033GetSchemaAlterStatusR" + - "equest\022\021\n\ttableName\030\001 \002(\014\"P\n\034GetSchemaAl" + - "terStatusResponse\022\032\n\022yetToUpdateRegions\030" + - "\001 \001(\r\022\024\n\014totalRegions\030\002 \001(\r\"0\n\032GetTableD" + - "escriptorsRequest\022\022\n\ntableNames\030\001 \003(\t\"@\n" + - "\033GetTableDescriptorsResponse\022!\n\013tableSch" + - "ema\030\001 \003(\0132\014.TableSchema\"\031\n\027GetClusterSta" + - "tusRequest\"A\n\030GetClusterStatusResponse\022%" + - "\n\rclusterStatus\030\001 \002(\0132\016.ClusterStatus2\206\002", - "\n\024MasterMonitorService\022S\n\024getSchemaAlter" + - "Status\022\034.GetSchemaAlterStatusRequest\032\035.G" + - "etSchemaAlterStatusResponse\022P\n\023getTableD" + - "escriptors\022\033.GetTableDescriptorsRequest\032" + - "\034.GetTableDescriptorsResponse\022G\n\020getClus" + - "terStatus\022\030.GetClusterStatusRequest\032\031.Ge" + - "tClusterStatusResponseBI\n*org.apache.had" + - "oop.hbase.protobuf.generatedB\023MasterMoni" + - "torProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_GetSchemaAlterStatusRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetSchemaAlterStatusRequest_descriptor, - new java.lang.String[] { "TableName", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest.Builder.class); - internal_static_GetSchemaAlterStatusResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetSchemaAlterStatusResponse_descriptor, - new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusResponse.Builder.class); - internal_static_GetTableDescriptorsRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_GetTableDescriptorsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetTableDescriptorsRequest_descriptor, - new java.lang.String[] { "TableNames", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsRequest.Builder.class); - internal_static_GetTableDescriptorsResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_GetTableDescriptorsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetTableDescriptorsResponse_descriptor, - new java.lang.String[] { "TableSchema", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse.Builder.class); - internal_static_GetClusterStatusRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_GetClusterStatusRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetClusterStatusRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest.Builder.class); - internal_static_GetClusterStatusResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_GetClusterStatusResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetClusterStatusResponse_descriptor, - new java.lang.String[] { "ClusterStatus", }, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git 
hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java deleted file mode 100644 index b8c322f..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ /dev/null @@ -1,969 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: Master.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class MasterProtos { - private MasterProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface IsMasterRunningRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class IsMasterRunningRequest extends - com.google.protobuf.GeneratedMessage - implements IsMasterRunningRequestOrBuilder { - // Use IsMasterRunningRequest.newBuilder() to construct. - private IsMasterRunningRequest(Builder builder) { - super(builder); - } - private IsMasterRunningRequest(boolean noInit) {} - - private static final IsMasterRunningRequest defaultInstance; - public static IsMasterRunningRequest getDefaultInstance() { - return defaultInstance; - } - - public IsMasterRunningRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( - 
com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:IsMasterRunningRequest) - } - - static { - defaultInstance = new IsMasterRunningRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:IsMasterRunningRequest) - } - - public interface IsMasterRunningResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool isMasterRunning = 1; - boolean hasIsMasterRunning(); - boolean getIsMasterRunning(); - } - public static final class IsMasterRunningResponse extends - com.google.protobuf.GeneratedMessage - implements IsMasterRunningResponseOrBuilder { - // Use IsMasterRunningResponse.newBuilder() to construct. - private IsMasterRunningResponse(Builder builder) { - super(builder); - } - private IsMasterRunningResponse(boolean noInit) {} - - private static final IsMasterRunningResponse defaultInstance; - public static IsMasterRunningResponse getDefaultInstance() { - return defaultInstance; - } - - public IsMasterRunningResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool isMasterRunning = 1; - public static final int ISMASTERRUNNING_FIELD_NUMBER = 1; - private boolean isMasterRunning_; - public boolean hasIsMasterRunning() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getIsMasterRunning() { - return isMasterRunning_; - } - - private void initFields() { - isMasterRunning_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasIsMasterRunning()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, isMasterRunning_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, isMasterRunning_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } 
- - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) obj; - - boolean result = true; - result = result && (hasIsMasterRunning() == other.hasIsMasterRunning()); - if (hasIsMasterRunning()) { - result = result && (getIsMasterRunning() - == other.getIsMasterRunning()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasIsMasterRunning()) { - hash = (37 * hash) + ISMASTERRUNNING_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIsMasterRunning()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - isMasterRunning_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.isMasterRunning_ = isMasterRunning_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()) return this; - if (other.hasIsMasterRunning()) { - setIsMasterRunning(other.getIsMasterRunning()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasIsMasterRunning()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - isMasterRunning_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool isMasterRunning = 1; - private boolean isMasterRunning_ ; - public boolean hasIsMasterRunning() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getIsMasterRunning() { - return isMasterRunning_; - } - public Builder setIsMasterRunning(boolean value) { - bitField0_ |= 0x00000001; - isMasterRunning_ = value; - onChanged(); - return this; - } - public Builder clearIsMasterRunning() { - bitField0_ = (bitField0_ & ~0x00000001); - isMasterRunning_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:IsMasterRunningResponse) - } - - static { - defaultInstance = new IsMasterRunningResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:IsMasterRunningResponse) - } - - public static abstract class MasterService - implements com.google.protobuf.Service { - protected MasterService() {} - - public interface Interface { - public abstract void isMasterRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new 
MasterService() { - @java.lang.Override - public void isMasterRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, - com.google.protobuf.RpcCallback done) { - impl.isMasterRunning(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void isMasterRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.isMasterRunning(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void isMasterRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) - throws com.google.protobuf.ServiceException { - return 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_IsMasterRunningRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_IsMasterRunningRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_IsMasterRunningResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_IsMasterRunningResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\014Master.proto\"\030\n\026IsMasterRunningRequest" + - "\"2\n\027IsMasterRunningResponse\022\027\n\017isMasterR" + - "unning\030\001 \002(\0102U\n\rMasterService\022D\n\017isMaste" + - "rRunning\022\027.IsMasterRunningRequest\032\030.IsMa" + - "sterRunningResponseBB\n*org.apache.hadoop" + - ".hbase.protobuf.generatedB\014MasterProtosH" + - "\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_IsMasterRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_IsMasterRunningRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_IsMasterRunningRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); - internal_static_IsMasterRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_IsMasterRunningResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_IsMasterRunningResponse_descriptor, - new java.lang.String[] { "IsMasterRunning", }, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java deleted file mode 100644 index f619f9c..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java +++ /dev/null @@ -1,1184 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: MultiRowMutation.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class MultiRowMutation { - private MultiRowMutation() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface MultiMutateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Mutate mutationRequest = 1; - java.util.List - getMutationRequestList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutationRequest(int index); - int getMutationRequestCount(); - java.util.List - getMutationRequestOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutationRequestOrBuilder( - int index); - } - public static final class MultiMutateRequest extends - com.google.protobuf.GeneratedMessage - implements MultiMutateRequestOrBuilder { - // Use MultiMutateRequest.newBuilder() to construct. - private MultiMutateRequest(Builder builder) { - super(builder); - } - private MultiMutateRequest(boolean noInit) {} - - private static final MultiMutateRequest defaultInstance; - public static MultiMutateRequest getDefaultInstance() { - return defaultInstance; - } - - public MultiMutateRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable; - } - - // repeated .Mutate mutationRequest = 1; - public static final int MUTATIONREQUEST_FIELD_NUMBER = 1; - private java.util.List mutationRequest_; - public java.util.List getMutationRequestList() { - return mutationRequest_; - } - public java.util.List - getMutationRequestOrBuilderList() { - return mutationRequest_; - } - public int getMutationRequestCount() { - return mutationRequest_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutationRequest(int index) { - return mutationRequest_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutationRequestOrBuilder( - int index) { - return mutationRequest_.get(index); - } - - private void initFields() { - mutationRequest_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getMutationRequestCount(); i++) { - if (!getMutationRequest(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < mutationRequest_.size(); i++) { - output.writeMessage(1, mutationRequest_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < mutationRequest_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, mutationRequest_.get(i)); - } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) obj; - - boolean result = true; - result = result && getMutationRequestList() - .equals(other.getMutationRequestList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getMutationRequestCount() > 0) { - hash = (37 * hash) + MUTATIONREQUEST_FIELD_NUMBER; - hash = (53 * hash) + getMutationRequestList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder 
builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMutationRequestFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (mutationRequestBuilder_ == null) { - mutationRequest_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - mutationRequestBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this); - int from_bitField0_ = bitField0_; - if (mutationRequestBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.mutationRequest_ = mutationRequest_; - } else { - result.mutationRequest_ = mutationRequestBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance()) return this; - if (mutationRequestBuilder_ == null) { - if (!other.mutationRequest_.isEmpty()) { - if (mutationRequest_.isEmpty()) { - mutationRequest_ = other.mutationRequest_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureMutationRequestIsMutable(); - mutationRequest_.addAll(other.mutationRequest_); - } - onChanged(); - } - } else { - if (!other.mutationRequest_.isEmpty()) { - if (mutationRequestBuilder_.isEmpty()) { - mutationRequestBuilder_.dispose(); - mutationRequestBuilder_ = null; - mutationRequest_ = other.mutationRequest_; - bitField0_ = (bitField0_ & ~0x00000001); - mutationRequestBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getMutationRequestFieldBuilder() : null; - } else { - mutationRequestBuilder_.addAllMessages(other.mutationRequest_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getMutationRequestCount(); i++) { - if (!getMutationRequest(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMutationRequest(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Mutate mutationRequest = 1; - private java.util.List mutationRequest_ = - java.util.Collections.emptyList(); - private void ensureMutationRequestIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - mutationRequest_ = new java.util.ArrayList(mutationRequest_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutationRequestBuilder_; - - public java.util.List getMutationRequestList() { - if (mutationRequestBuilder_ == null) { - return java.util.Collections.unmodifiableList(mutationRequest_); - } else { - return mutationRequestBuilder_.getMessageList(); - } - } - public int getMutationRequestCount() { - if (mutationRequestBuilder_ == null) { - return mutationRequest_.size(); - } else { - return mutationRequestBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutationRequest(int index) { - if (mutationRequestBuilder_ == null) { - return mutationRequest_.get(index); - } else { - return mutationRequestBuilder_.getMessage(index); - } - } - public Builder setMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { - if (mutationRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMutationRequestIsMutable(); - mutationRequest_.set(index, value); - onChanged(); - } else { - mutationRequestBuilder_.setMessage(index, value); - } - return this; - } - public Builder setMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.set(index, builderForValue.build()); - onChanged(); - } else { - mutationRequestBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { 
- if (mutationRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMutationRequestIsMutable(); - mutationRequest_.add(value); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(value); - } - return this; - } - public Builder addMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { - if (mutationRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMutationRequestIsMutable(); - mutationRequest_.add(index, value); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(index, value); - } - return this; - } - public Builder addMutationRequest( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.add(builderForValue.build()); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.add(index, builderForValue.build()); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllMutationRequest( - java.lang.Iterable values) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - super.addAll(values, mutationRequest_); - onChanged(); - } else { - mutationRequestBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearMutationRequest() { - if (mutationRequestBuilder_ == null) { - mutationRequest_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - mutationRequestBuilder_.clear(); - } - return this; - } - public Builder removeMutationRequest(int index) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.remove(index); - onChanged(); - } else { - mutationRequestBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutationRequestBuilder( - int index) { - return getMutationRequestFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutationRequestOrBuilder( - int index) { - if (mutationRequestBuilder_ == null) { - return mutationRequest_.get(index); } else { - return mutationRequestBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getMutationRequestOrBuilderList() { - if (mutationRequestBuilder_ != null) { - return mutationRequestBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(mutationRequest_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder addMutationRequestBuilder() { - return getMutationRequestFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder addMutationRequestBuilder( - int index) { - return getMutationRequestFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()); - } - public java.util.List - getMutationRequestBuilderList() { - return 
getMutationRequestFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> - getMutationRequestFieldBuilder() { - if (mutationRequestBuilder_ == null) { - mutationRequestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( - mutationRequest_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - mutationRequest_ = null; - } - return mutationRequestBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MultiMutateRequest) - } - - static { - defaultInstance = new MultiMutateRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiMutateRequest) - } - - public interface MultiMutateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class MultiMutateResponse extends - com.google.protobuf.GeneratedMessage - implements MultiMutateResponseOrBuilder { - // Use MultiMutateResponse.newBuilder() to construct. - private MultiMutateResponse(Builder builder) { - super(builder); - } - private MultiMutateResponse(boolean noInit) {} - - private static final MultiMutateResponse defaultInstance; - public static MultiMutateResponse getDefaultInstance() { - return defaultInstance; - } - - public MultiMutateResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) obj; - - boolean result = true; - 
result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse prototype) { - 
return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:MultiMutateResponse) - } - - static { - defaultInstance = new MultiMutateResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiMutateResponse) - } - - public static abstract class MultiRowMutationService - implements com.google.protobuf.Service { - protected MultiRowMutationService() {} - - public interface Interface { - public abstract void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new MultiRowMutationService() { - @java.lang.Override - public void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done) { - impl.mutateRows(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - 
getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - 
com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiMutateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiMutateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiMutateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiMutateResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\026MultiRowMutation.proto\032\014Client.proto\"6" + - "\n\022MultiMutateRequest\022 \n\017mutationRequest\030" + - "\001 \003(\0132\007.Mutate\"\025\n\023MultiMutateResponse2R\n" + - "\027MultiRowMutationService\0227\n\nmutateRows\022\023" + - ".MultiMutateRequest\032\024.MultiMutateRespons" + - "eBF\n*org.apache.hadoop.hbase.protobuf.ge" + - "neratedB\020MultiRowMutationH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_MultiMutateRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_MultiMutateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiMutateRequest_descriptor, - new java.lang.String[] { 
"MutationRequest", }, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); - internal_static_MultiMutateResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_MultiMutateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiMutateResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java deleted file mode 100644 index 302a411..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ /dev/null @@ -1,3848 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: RPC.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class RPCProtos { - private RPCProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface UserInformationOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string effectiveUser = 1; - boolean hasEffectiveUser(); - String getEffectiveUser(); - - // optional string realUser = 2; - boolean hasRealUser(); - String getRealUser(); - } - public static final class UserInformation extends - com.google.protobuf.GeneratedMessage - implements UserInformationOrBuilder { - // Use UserInformation.newBuilder() to construct. 
- private UserInformation(Builder builder) { - super(builder); - } - private UserInformation(boolean noInit) {} - - private static final UserInformation defaultInstance; - public static UserInformation getDefaultInstance() { - return defaultInstance; - } - - public UserInformation getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; - } - - private int bitField0_; - // required string effectiveUser = 1; - public static final int EFFECTIVEUSER_FIELD_NUMBER = 1; - private java.lang.Object effectiveUser_; - public boolean hasEffectiveUser() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getEffectiveUser() { - java.lang.Object ref = effectiveUser_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - effectiveUser_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getEffectiveUserBytes() { - java.lang.Object ref = effectiveUser_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - effectiveUser_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional string realUser = 2; - public static final int REALUSER_FIELD_NUMBER = 2; - private java.lang.Object realUser_; - public boolean hasRealUser() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getRealUser() { - java.lang.Object ref = realUser_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - realUser_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getRealUserBytes() { - java.lang.Object ref = realUser_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - realUser_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - effectiveUser_ = ""; - realUser_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasEffectiveUser()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getEffectiveUserBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getRealUserBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 
0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getEffectiveUserBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getRealUserBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) obj; - - boolean result = true; - result = result && (hasEffectiveUser() == other.hasEffectiveUser()); - if (hasEffectiveUser()) { - result = result && getEffectiveUser() - .equals(other.getEffectiveUser()); - } - result = result && (hasRealUser() == other.hasRealUser()); - if (hasRealUser()) { - result = result && getRealUser() - .equals(other.getRealUser()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEffectiveUser()) { - hash = (37 * hash) + EFFECTIVEUSER_FIELD_NUMBER; - hash = (53 * hash) + getEffectiveUser().hashCode(); - } - if (hasRealUser()) { - hash = (37 * hash) + REALUSER_FIELD_NUMBER; - hash = (53 * hash) + getRealUser().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - effectiveUser_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - realUser_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.effectiveUser_ = effectiveUser_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.realUser_ = realUser_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this; - if (other.hasEffectiveUser()) { - setEffectiveUser(other.getEffectiveUser()); - } - if (other.hasRealUser()) { - setRealUser(other.getRealUser()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasEffectiveUser()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - effectiveUser_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - realUser_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string effectiveUser = 1; - private java.lang.Object effectiveUser_ = ""; - public boolean hasEffectiveUser() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getEffectiveUser() { - java.lang.Object ref = effectiveUser_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - effectiveUser_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setEffectiveUser(String 
value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - effectiveUser_ = value; - onChanged(); - return this; - } - public Builder clearEffectiveUser() { - bitField0_ = (bitField0_ & ~0x00000001); - effectiveUser_ = getDefaultInstance().getEffectiveUser(); - onChanged(); - return this; - } - void setEffectiveUser(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - effectiveUser_ = value; - onChanged(); - } - - // optional string realUser = 2; - private java.lang.Object realUser_ = ""; - public boolean hasRealUser() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getRealUser() { - java.lang.Object ref = realUser_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - realUser_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setRealUser(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - realUser_ = value; - onChanged(); - return this; - } - public Builder clearRealUser() { - bitField0_ = (bitField0_ & ~0x00000002); - realUser_ = getDefaultInstance().getRealUser(); - onChanged(); - return this; - } - void setRealUser(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - realUser_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:UserInformation) - } - - static { - defaultInstance = new UserInformation(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UserInformation) - } - - public interface ConnectionHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .UserInformation userInfo = 1; - boolean hasUserInfo(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - - // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; - boolean hasProtocol(); - String getProtocol(); - } - public static final class ConnectionHeader extends - com.google.protobuf.GeneratedMessage - implements ConnectionHeaderOrBuilder { - // Use ConnectionHeader.newBuilder() to construct. 
- private ConnectionHeader(Builder builder) { - super(builder); - } - private ConnectionHeader(boolean noInit) {} - - private static final ConnectionHeader defaultInstance; - public static ConnectionHeader getDefaultInstance() { - return defaultInstance; - } - - public ConnectionHeader getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; - } - - private int bitField0_; - // optional .UserInformation userInfo = 1; - public static final int USERINFO_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_; - public boolean hasUserInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; - } - - // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; - public static final int PROTOCOL_FIELD_NUMBER = 2; - private java.lang.Object protocol_; - public boolean hasProtocol() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getProtocol() { - java.lang.Object ref = protocol_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - protocol_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getProtocolBytes() { - java.lang.Object ref = protocol_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - protocol_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasUserInfo()) { - if (!getUserInfo().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getProtocolBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += 
com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getProtocolBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) obj; - - boolean result = true; - result = result && (hasUserInfo() == other.hasUserInfo()); - if (hasUserInfo()) { - result = result && getUserInfo() - .equals(other.getUserInfo()); - } - result = result && (hasProtocol() == other.hasProtocol()); - if (hasProtocol()) { - result = result && getProtocol() - .equals(other.getProtocol()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasUserInfo()) { - hash = (37 * hash) + USERINFO_FIELD_NUMBER; - hash = (53 * hash) + getUserInfo().hashCode(); - } - if (hasProtocol()) { - hash = (37 * hash) + PROTOCOL_FIELD_NUMBER; - hash = (53 * hash) + getProtocol().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) 
{ - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getUserInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - } else { - userInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (userInfoBuilder_ == null) { - result.userInfo_ = userInfo_; - } else { - result.userInfo_ = userInfoBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.protocol_ = protocol_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this; - if (other.hasUserInfo()) { - mergeUserInfo(other.getUserInfo()); - } - if (other.hasProtocol()) { - setProtocol(other.getProtocol()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasUserInfo()) { - if (!getUserInfo().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(); - if (hasUserInfo()) { - subBuilder.mergeFrom(getUserInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setUserInfo(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - protocol_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // optional .UserInformation userInfo = 1; - private 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; - public boolean hasUserInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - if (userInfoBuilder_ == null) { - return userInfo_; - } else { - return userInfoBuilder_.getMessage(); - } - } - public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { - if (userInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - userInfo_ = value; - onChanged(); - } else { - userInfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setUserInfo( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) { - if (userInfoBuilder_ == null) { - userInfo_ = builderForValue.build(); - onChanged(); - } else { - userInfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { - if (userInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { - userInfo_ = - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); - } else { - userInfo_ = value; - } - onChanged(); - } else { - userInfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearUserInfo() { - if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - onChanged(); - } else { - userInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getUserInfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - if (userInfoBuilder_ != null) { - return userInfoBuilder_.getMessageOrBuilder(); - } else { - return userInfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> - getUserInfoFieldBuilder() { - if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, - getParentForChildren(), - isClean()); - userInfo_ = null; - } - return userInfoBuilder_; - } - - // optional string protocol = 2 [default 
= "org.apache.hadoop.hbase.client.ClientProtocol"]; - private java.lang.Object protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; - public boolean hasProtocol() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getProtocol() { - java.lang.Object ref = protocol_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - protocol_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setProtocol(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - protocol_ = value; - onChanged(); - return this; - } - public Builder clearProtocol() { - bitField0_ = (bitField0_ & ~0x00000002); - protocol_ = getDefaultInstance().getProtocol(); - onChanged(); - return this; - } - void setProtocol(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - protocol_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:ConnectionHeader) - } - - static { - defaultInstance = new ConnectionHeader(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ConnectionHeader) - } - - public interface RpcRequestHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint32 callId = 1; - boolean hasCallId(); - int getCallId(); - - // optional .RPCTInfo tinfo = 2; - boolean hasTinfo(); - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo(); - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder(); - } - public static final class RpcRequestHeader extends - com.google.protobuf.GeneratedMessage - implements RpcRequestHeaderOrBuilder { - // Use RpcRequestHeader.newBuilder() to construct. - private RpcRequestHeader(Builder builder) { - super(builder); - } - private RpcRequestHeader(boolean noInit) {} - - private static final RpcRequestHeader defaultInstance; - public static RpcRequestHeader getDefaultInstance() { - return defaultInstance; - } - - public RpcRequestHeader getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; - } - - private int bitField0_; - // required uint32 callId = 1; - public static final int CALLID_FIELD_NUMBER = 1; - private int callId_; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - - // optional .RPCTInfo tinfo = 2; - public static final int TINFO_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_; - public boolean hasTinfo() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { - return tinfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { - return tinfo_; - } - - private void initFields() { - callId_ = 0; - tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = 
memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCallId()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tinfo_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tinfo_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) obj; - - boolean result = true; - result = result && (hasCallId() == other.hasCallId()); - if (hasCallId()) { - result = result && (getCallId() - == other.getCallId()); - } - result = result && (hasTinfo() == other.hasTinfo()); - if (hasTinfo()) { - result = result && getTinfo() - .equals(other.getTinfo()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCallId()) { - hash = (37 * hash) + CALLID_FIELD_NUMBER; - hash = (53 * hash) + getCallId(); - } - if (hasTinfo()) { - hash = (37 * hash) + TINFO_FIELD_NUMBER; - hash = (53 * hash) + getTinfo().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeaderOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTinfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() 
{ - super.clear(); - callId_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - if (tinfoBuilder_ == null) { - tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - } else { - tinfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.callId_ = callId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (tinfoBuilder_ == null) { - result.tinfo_ = tinfo_; - } else { - result.tinfo_ = tinfoBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance()) return this; - if (other.hasCallId()) { - setCallId(other.getCallId()); - } - if (other.hasTinfo()) { - mergeTinfo(other.getTinfo()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCallId()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, 
unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - callId_ = input.readUInt32(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(); - if (hasTinfo()) { - subBuilder.mergeFrom(getTinfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setTinfo(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required uint32 callId = 1; - private int callId_ ; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - public Builder setCallId(int value) { - bitField0_ |= 0x00000001; - callId_ = value; - onChanged(); - return this; - } - public Builder clearCallId() { - bitField0_ = (bitField0_ & ~0x00000001); - callId_ = 0; - onChanged(); - return this; - } - - // optional .RPCTInfo tinfo = 2; - private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> tinfoBuilder_; - public boolean hasTinfo() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { - if (tinfoBuilder_ == null) { - return tinfo_; - } else { - return tinfoBuilder_.getMessage(); - } - } - public Builder setTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { - if (tinfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - tinfo_ = value; - onChanged(); - } else { - tinfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setTinfo( - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) { - if (tinfoBuilder_ == null) { - tinfo_ = builderForValue.build(); - onChanged(); - } else { - tinfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { - if (tinfoBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - tinfo_ != org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) { - tinfo_ = - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(tinfo_).mergeFrom(value).buildPartial(); - } else { - tinfo_ = value; - } - onChanged(); - } else { - tinfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearTinfo() { - if (tinfoBuilder_ == null) { - tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - onChanged(); - } else { - tinfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTinfoBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getTinfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { - if (tinfoBuilder_ != null) { - return 
tinfoBuilder_.getMessageOrBuilder(); - } else { - return tinfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> - getTinfoFieldBuilder() { - if (tinfoBuilder_ == null) { - tinfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder>( - tinfo_, - getParentForChildren(), - isClean()); - tinfo_ = null; - } - return tinfoBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RpcRequestHeader) - } - - static { - defaultInstance = new RpcRequestHeader(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcRequestHeader) - } - - public interface RpcRequestBodyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string methodName = 1; - boolean hasMethodName(); - String getMethodName(); - - // optional uint64 clientProtocolVersion = 2; - boolean hasClientProtocolVersion(); - long getClientProtocolVersion(); - - // optional bytes request = 3; - boolean hasRequest(); - com.google.protobuf.ByteString getRequest(); - - // optional string requestClassName = 4; - boolean hasRequestClassName(); - String getRequestClassName(); - } - public static final class RpcRequestBody extends - com.google.protobuf.GeneratedMessage - implements RpcRequestBodyOrBuilder { - // Use RpcRequestBody.newBuilder() to construct. - private RpcRequestBody(Builder builder) { - super(builder); - } - private RpcRequestBody(boolean noInit) {} - - private static final RpcRequestBody defaultInstance; - public static RpcRequestBody getDefaultInstance() { - return defaultInstance; - } - - public RpcRequestBody getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; - } - - private int bitField0_; - // required string methodName = 1; - public static final int METHODNAME_FIELD_NUMBER = 1; - private java.lang.Object methodName_; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - methodName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getMethodNameBytes() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - methodName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional uint64 clientProtocolVersion = 2; - public static final int CLIENTPROTOCOLVERSION_FIELD_NUMBER = 2; - private long clientProtocolVersion_; - public boolean 
hasClientProtocolVersion() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getClientProtocolVersion() { - return clientProtocolVersion_; - } - - // optional bytes request = 3; - public static final int REQUEST_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString request_; - public boolean hasRequest() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - - // optional string requestClassName = 4; - public static final int REQUESTCLASSNAME_FIELD_NUMBER = 4; - private java.lang.Object requestClassName_; - public boolean hasRequestClassName() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public String getRequestClassName() { - java.lang.Object ref = requestClassName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - requestClassName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getRequestClassNameBytes() { - java.lang.Object ref = requestClassName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - requestClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - methodName_ = ""; - clientProtocolVersion_ = 0L; - request_ = com.google.protobuf.ByteString.EMPTY; - requestClassName_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasMethodName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, clientProtocolVersion_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, request_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getRequestClassNameBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, clientProtocolVersion_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, request_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getRequestClassNameBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public 
boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) obj; - - boolean result = true; - result = result && (hasMethodName() == other.hasMethodName()); - if (hasMethodName()) { - result = result && getMethodName() - .equals(other.getMethodName()); - } - result = result && (hasClientProtocolVersion() == other.hasClientProtocolVersion()); - if (hasClientProtocolVersion()) { - result = result && (getClientProtocolVersion() - == other.getClientProtocolVersion()); - } - result = result && (hasRequest() == other.hasRequest()); - if (hasRequest()) { - result = result && getRequest() - .equals(other.getRequest()); - } - result = result && (hasRequestClassName() == other.hasRequestClassName()); - if (hasRequestClassName()) { - result = result && getRequestClassName() - .equals(other.getRequestClassName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMethodName()) { - hash = (37 * hash) + METHODNAME_FIELD_NUMBER; - hash = (53 * hash) + getMethodName().hashCode(); - } - if (hasClientProtocolVersion()) { - hash = (37 * hash) + CLIENTPROTOCOLVERSION_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getClientProtocolVersion()); - } - if (hasRequest()) { - hash = (37 * hash) + REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getRequest().hashCode(); - } - if (hasRequestClassName()) { - hash = (37 * hash) + REQUESTCLASSNAME_FIELD_NUMBER; - hash = (53 * hash) + getRequestClassName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBodyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - methodName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - clientProtocolVersion_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - request_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - requestClassName_ = ""; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDescriptor(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.methodName_ = methodName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.clientProtocolVersion_ = clientProtocolVersion_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.request_ = request_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.requestClassName_ = requestClassName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance()) return this; - if (other.hasMethodName()) { - setMethodName(other.getMethodName()); - } - if (other.hasClientProtocolVersion()) { - setClientProtocolVersion(other.getClientProtocolVersion()); - } - if (other.hasRequest()) { - setRequest(other.getRequest()); - } - if (other.hasRequestClassName()) { - setRequestClassName(other.getRequestClassName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasMethodName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 
0x00000001; - methodName_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - clientProtocolVersion_ = input.readUInt64(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - request_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - requestClassName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string methodName = 1; - private java.lang.Object methodName_ = ""; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - methodName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setMethodName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - methodName_ = value; - onChanged(); - return this; - } - public Builder clearMethodName() { - bitField0_ = (bitField0_ & ~0x00000001); - methodName_ = getDefaultInstance().getMethodName(); - onChanged(); - return this; - } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - methodName_ = value; - onChanged(); - } - - // optional uint64 clientProtocolVersion = 2; - private long clientProtocolVersion_ ; - public boolean hasClientProtocolVersion() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getClientProtocolVersion() { - return clientProtocolVersion_; - } - public Builder setClientProtocolVersion(long value) { - bitField0_ |= 0x00000002; - clientProtocolVersion_ = value; - onChanged(); - return this; - } - public Builder clearClientProtocolVersion() { - bitField0_ = (bitField0_ & ~0x00000002); - clientProtocolVersion_ = 0L; - onChanged(); - return this; - } - - // optional bytes request = 3; - private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRequest() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - public Builder setRequest(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - request_ = value; - onChanged(); - return this; - } - public Builder clearRequest() { - bitField0_ = (bitField0_ & ~0x00000004); - request_ = getDefaultInstance().getRequest(); - onChanged(); - return this; - } - - // optional string requestClassName = 4; - private java.lang.Object requestClassName_ = ""; - public boolean hasRequestClassName() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public String getRequestClassName() { - java.lang.Object ref = requestClassName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - requestClassName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setRequestClassName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - requestClassName_ = value; - onChanged(); - return this; - } - public Builder clearRequestClassName() { - bitField0_ = (bitField0_ & ~0x00000008); - requestClassName_ = getDefaultInstance().getRequestClassName(); - onChanged(); - return this; - } - void setRequestClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000008; - requestClassName_ = value; - onChanged(); - } - - // 
@@protoc_insertion_point(builder_scope:RpcRequestBody) - } - - static { - defaultInstance = new RpcRequestBody(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcRequestBody) - } - - public interface RpcResponseHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint32 callId = 1; - boolean hasCallId(); - int getCallId(); - - // required .RpcResponseHeader.Status status = 2; - boolean hasStatus(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus(); - } - public static final class RpcResponseHeader extends - com.google.protobuf.GeneratedMessage - implements RpcResponseHeaderOrBuilder { - // Use RpcResponseHeader.newBuilder() to construct. - private RpcResponseHeader(Builder builder) { - super(builder); - } - private RpcResponseHeader(boolean noInit) {} - - private static final RpcResponseHeader defaultInstance; - public static RpcResponseHeader getDefaultInstance() { - return defaultInstance; - } - - public RpcResponseHeader getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; - } - - public enum Status - implements com.google.protobuf.ProtocolMessageEnum { - SUCCESS(0, 0), - ERROR(1, 1), - FATAL(2, 2), - ; - - public static final int SUCCESS_VALUE = 0; - public static final int ERROR_VALUE = 1; - public static final int FATAL_VALUE = 2; - - - public final int getNumber() { return value; } - - public static Status valueOf(int value) { - switch (value) { - case 0: return SUCCESS; - case 1: return ERROR; - case 2: return FATAL; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Status findValueByNumber(int number) { - return Status.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor().getEnumTypes().get(0); - } - - private static final Status[] VALUES = { - SUCCESS, ERROR, FATAL, - }; - - public static Status valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Status(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:RpcResponseHeader.Status) - } - - private int bitField0_; - // required uint32 callId = 1; - public static final int CALLID_FIELD_NUMBER = 1; - private int callId_; - public 
boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - - // required .RpcResponseHeader.Status status = 2; - public static final int STATUS_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_; - public boolean hasStatus() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { - return status_; - } - - private void initFields() { - callId_ = 0; - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCallId()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasStatus()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, status_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, status_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) obj; - - boolean result = true; - result = result && (hasCallId() == other.hasCallId()); - if (hasCallId()) { - result = result && (getCallId() - == other.getCallId()); - } - result = result && (hasStatus() == other.hasStatus()); - if (hasStatus()) { - result = result && - (getStatus() == other.getStatus()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCallId()) { - hash = (37 * hash) + CALLID_FIELD_NUMBER; - hash = (53 * hash) + getCallId(); - } - if (hasStatus()) { - hash = (37 * hash) + STATUS_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getStatus()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeaderOrBuilder { - public static final 
com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - callId_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.callId_ = callId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.status_ = status_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance()) return this; - if (other.hasCallId()) { - setCallId(other.getCallId()); - 
} - if (other.hasStatus()) { - setStatus(other.getStatus()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCallId()) { - - return false; - } - if (!hasStatus()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - callId_ = input.readUInt32(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - status_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required uint32 callId = 1; - private int callId_ ; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - public Builder setCallId(int value) { - bitField0_ |= 0x00000001; - callId_ = value; - onChanged(); - return this; - } - public Builder clearCallId() { - bitField0_ = (bitField0_ & ~0x00000001); - callId_ = 0; - onChanged(); - return this; - } - - // required .RpcResponseHeader.Status status = 2; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - public boolean hasStatus() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { - return status_; - } - public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - status_ = value; - onChanged(); - return this; - } - public Builder clearStatus() { - bitField0_ = (bitField0_ & ~0x00000002); - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RpcResponseHeader) - } - - static { - defaultInstance = new RpcResponseHeader(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcResponseHeader) - } - - public interface RpcResponseBodyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes response = 1; - boolean hasResponse(); - com.google.protobuf.ByteString getResponse(); - } - public static final class RpcResponseBody extends - com.google.protobuf.GeneratedMessage - implements RpcResponseBodyOrBuilder { - // Use RpcResponseBody.newBuilder() to construct. 
- private RpcResponseBody(Builder builder) { - super(builder); - } - private RpcResponseBody(boolean noInit) {} - - private static final RpcResponseBody defaultInstance; - public static RpcResponseBody getDefaultInstance() { - return defaultInstance; - } - - public RpcResponseBody getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; - } - - private int bitField0_; - // optional bytes response = 1; - public static final int RESPONSE_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString response_; - public boolean hasResponse() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getResponse() { - return response_; - } - - private void initFields() { - response_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, response_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, response_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) obj; - - boolean result = true; - result = result && (hasResponse() == other.hasResponse()); - if (hasResponse()) { - result = result && getResponse() - .equals(other.getResponse()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResponse()) { - hash = (37 * hash) + RESPONSE_FIELD_NUMBER; - hash = (53 * hash) + getResponse().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBodyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - response_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.response_ = response_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance()) return this; - if (other.hasResponse()) { - setResponse(other.getResponse()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - 
this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - response_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // optional bytes response = 1; - private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasResponse() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getResponse() { - return response_; - } - public Builder setResponse(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - response_ = value; - onChanged(); - return this; - } - public Builder clearResponse() { - bitField0_ = (bitField0_ & ~0x00000001); - response_ = getDefaultInstance().getResponse(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RpcResponseBody) - } - - static { - defaultInstance = new RpcResponseBody(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcResponseBody) - } - - public interface RpcExceptionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string exceptionName = 1; - boolean hasExceptionName(); - String getExceptionName(); - - // optional string stackTrace = 2; - boolean hasStackTrace(); - String getStackTrace(); - } - public static final class RpcException extends - com.google.protobuf.GeneratedMessage - implements RpcExceptionOrBuilder { - // Use RpcException.newBuilder() to construct. 
- private RpcException(Builder builder) { - super(builder); - } - private RpcException(boolean noInit) {} - - private static final RpcException defaultInstance; - public static RpcException getDefaultInstance() { - return defaultInstance; - } - - public RpcException getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; - } - - private int bitField0_; - // required string exceptionName = 1; - public static final int EXCEPTIONNAME_FIELD_NUMBER = 1; - private java.lang.Object exceptionName_; - public boolean hasExceptionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getExceptionName() { - java.lang.Object ref = exceptionName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - exceptionName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getExceptionNameBytes() { - java.lang.Object ref = exceptionName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - exceptionName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional string stackTrace = 2; - public static final int STACKTRACE_FIELD_NUMBER = 2; - private java.lang.Object stackTrace_; - public boolean hasStackTrace() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getStackTrace() { - java.lang.Object ref = stackTrace_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - stackTrace_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getStackTraceBytes() { - java.lang.Object ref = stackTrace_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - stackTrace_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - exceptionName_ = ""; - stackTrace_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasExceptionName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getExceptionNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStackTraceBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 
0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getExceptionNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStackTraceBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) obj; - - boolean result = true; - result = result && (hasExceptionName() == other.hasExceptionName()); - if (hasExceptionName()) { - result = result && getExceptionName() - .equals(other.getExceptionName()); - } - result = result && (hasStackTrace() == other.hasStackTrace()); - if (hasStackTrace()) { - result = result && getStackTrace() - .equals(other.getStackTrace()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasExceptionName()) { - hash = (37 * hash) + EXCEPTIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getExceptionName().hashCode(); - } - if (hasStackTrace()) { - hash = (37 * hash) + STACKTRACE_FIELD_NUMBER; - hash = (53 * hash) + getStackTrace().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcExceptionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - exceptionName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - stackTrace_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException build() { - 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.exceptionName_ = exceptionName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.stackTrace_ = stackTrace_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance()) return this; - if (other.hasExceptionName()) { - setExceptionName(other.getExceptionName()); - } - if (other.hasStackTrace()) { - setStackTrace(other.getStackTrace()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasExceptionName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - exceptionName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - stackTrace_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string exceptionName = 1; - private java.lang.Object exceptionName_ = ""; - public boolean hasExceptionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getExceptionName() { - java.lang.Object ref = exceptionName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - exceptionName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setExceptionName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; 
- exceptionName_ = value; - onChanged(); - return this; - } - public Builder clearExceptionName() { - bitField0_ = (bitField0_ & ~0x00000001); - exceptionName_ = getDefaultInstance().getExceptionName(); - onChanged(); - return this; - } - void setExceptionName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - exceptionName_ = value; - onChanged(); - } - - // optional string stackTrace = 2; - private java.lang.Object stackTrace_ = ""; - public boolean hasStackTrace() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getStackTrace() { - java.lang.Object ref = stackTrace_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - stackTrace_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setStackTrace(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - stackTrace_ = value; - onChanged(); - return this; - } - public Builder clearStackTrace() { - bitField0_ = (bitField0_ & ~0x00000002); - stackTrace_ = getDefaultInstance().getStackTrace(); - onChanged(); - return this; - } - void setStackTrace(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - stackTrace_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:RpcException) - } - - static { - defaultInstance = new RpcException(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcException) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UserInformation_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserInformation_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ConnectionHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ConnectionHeader_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcRequestHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcRequestHeader_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcRequestBody_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcRequestBody_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcResponseHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcResponseHeader_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcResponseBody_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcResponseBody_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcException_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcException_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\tRPC.proto\032\rTracing.proto\":\n\017UserInform" + - "ation\022\025\n\reffectiveUser\030\001 \002(\t\022\020\n\010realUser" + - "\030\002 
\001(\t\"w\n\020ConnectionHeader\022\"\n\010userInfo\030\001" + - " \001(\0132\020.UserInformation\022?\n\010protocol\030\002 \001(\t" + - ":-org.apache.hadoop.hbase.client.ClientP" + - "rotocol\"<\n\020RpcRequestHeader\022\016\n\006callId\030\001 " + - "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"n\n\016RpcRequ" + - "estBody\022\022\n\nmethodName\030\001 \002(\t\022\035\n\025clientPro" + - "tocolVersion\030\002 \001(\004\022\017\n\007request\030\003 \001(\014\022\030\n\020r" + - "equestClassName\030\004 \001(\t\"{\n\021RpcResponseHead", - "er\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 \002(\0162\031.Rpc" + - "ResponseHeader.Status\"+\n\006Status\022\013\n\007SUCCE" + - "SS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017RpcRespons" + - "eBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcException\022" + - "\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstackTrace\030\002 \001" + - "(\tB<\n*org.apache.hadoop.hbase.protobuf.g" + - "eneratedB\tRPCProtosH\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_UserInformation_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_UserInformation_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserInformation_descriptor, - new java.lang.String[] { "EffectiveUser", "RealUser", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); - internal_static_ConnectionHeader_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_ConnectionHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ConnectionHeader_descriptor, - new java.lang.String[] { "UserInfo", "Protocol", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); - internal_static_RpcRequestHeader_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_RpcRequestHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcRequestHeader_descriptor, - new java.lang.String[] { "CallId", "Tinfo", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.Builder.class); - internal_static_RpcRequestBody_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_RpcRequestBody_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcRequestBody_descriptor, - new java.lang.String[] { "MethodName", "ClientProtocolVersion", "Request", "RequestClassName", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.Builder.class); - internal_static_RpcResponseHeader_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_RpcResponseHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_RpcResponseHeader_descriptor, - new java.lang.String[] { "CallId", "Status", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Builder.class); - internal_static_RpcResponseBody_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_RpcResponseBody_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcResponseBody_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.Builder.class); - internal_static_RpcException_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_RpcException_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcException_descriptor, - new java.lang.String[] { "ExceptionName", "StackTrace", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.Tracing.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java deleted file mode 100644 index 89eebc4..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java +++ /dev/null @@ -1,4284 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: RegionServerStatus.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class RegionServerStatusProtos { - private RegionServerStatusProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface RegionServerStartupRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint32 port = 1; - boolean hasPort(); - int getPort(); - - // required uint64 serverStartCode = 2; - boolean hasServerStartCode(); - long getServerStartCode(); - - // required uint64 serverCurrentTime = 3; - boolean hasServerCurrentTime(); - long getServerCurrentTime(); - } - public static final class RegionServerStartupRequest extends - com.google.protobuf.GeneratedMessage - implements RegionServerStartupRequestOrBuilder { - // Use RegionServerStartupRequest.newBuilder() to construct. 
- private RegionServerStartupRequest(Builder builder) { - super(builder); - } - private RegionServerStartupRequest(boolean noInit) {} - - private static final RegionServerStartupRequest defaultInstance; - public static RegionServerStartupRequest getDefaultInstance() { - return defaultInstance; - } - - public RegionServerStartupRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable; - } - - private int bitField0_; - // required uint32 port = 1; - public static final int PORT_FIELD_NUMBER = 1; - private int port_; - public boolean hasPort() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getPort() { - return port_; - } - - // required uint64 serverStartCode = 2; - public static final int SERVERSTARTCODE_FIELD_NUMBER = 2; - private long serverStartCode_; - public boolean hasServerStartCode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getServerStartCode() { - return serverStartCode_; - } - - // required uint64 serverCurrentTime = 3; - public static final int SERVERCURRENTTIME_FIELD_NUMBER = 3; - private long serverCurrentTime_; - public boolean hasServerCurrentTime() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getServerCurrentTime() { - return serverCurrentTime_; - } - - private void initFields() { - port_ = 0; - serverStartCode_ = 0L; - serverCurrentTime_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPort()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasServerStartCode()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasServerCurrentTime()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, port_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, serverStartCode_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, serverCurrentTime_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, port_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, serverStartCode_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, serverCurrentTime_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected 
java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) obj; - - boolean result = true; - result = result && (hasPort() == other.hasPort()); - if (hasPort()) { - result = result && (getPort() - == other.getPort()); - } - result = result && (hasServerStartCode() == other.hasServerStartCode()); - if (hasServerStartCode()) { - result = result && (getServerStartCode() - == other.getServerStartCode()); - } - result = result && (hasServerCurrentTime() == other.hasServerCurrentTime()); - if (hasServerCurrentTime()) { - result = result && (getServerCurrentTime() - == other.getServerCurrentTime()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPort()) { - hash = (37 * hash) + PORT_FIELD_NUMBER; - hash = (53 * hash) + getPort(); - } - if (hasServerStartCode()) { - hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getServerStartCode()); - } - if (hasServerCurrentTime()) { - hash = (37 * hash) + SERVERCURRENTTIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getServerCurrentTime()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - port_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - serverStartCode_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - serverCurrentTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - 
getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.port_ = port_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.serverStartCode_ = serverStartCode_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.serverCurrentTime_ = serverCurrentTime_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance()) return this; - if (other.hasPort()) { - setPort(other.getPort()); - } - if (other.hasServerStartCode()) { - setServerStartCode(other.getServerStartCode()); - } - if (other.hasServerCurrentTime()) { - setServerCurrentTime(other.getServerCurrentTime()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPort()) { - - return false; - } - if (!hasServerStartCode()) { - - return false; - } - if (!hasServerCurrentTime()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - 
this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - port_ = input.readUInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - serverStartCode_ = input.readUInt64(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - serverCurrentTime_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required uint32 port = 1; - private int port_ ; - public boolean hasPort() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getPort() { - return port_; - } - public Builder setPort(int value) { - bitField0_ |= 0x00000001; - port_ = value; - onChanged(); - return this; - } - public Builder clearPort() { - bitField0_ = (bitField0_ & ~0x00000001); - port_ = 0; - onChanged(); - return this; - } - - // required uint64 serverStartCode = 2; - private long serverStartCode_ ; - public boolean hasServerStartCode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getServerStartCode() { - return serverStartCode_; - } - public Builder setServerStartCode(long value) { - bitField0_ |= 0x00000002; - serverStartCode_ = value; - onChanged(); - return this; - } - public Builder clearServerStartCode() { - bitField0_ = (bitField0_ & ~0x00000002); - serverStartCode_ = 0L; - onChanged(); - return this; - } - - // required uint64 serverCurrentTime = 3; - private long serverCurrentTime_ ; - public boolean hasServerCurrentTime() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getServerCurrentTime() { - return serverCurrentTime_; - } - public Builder setServerCurrentTime(long value) { - bitField0_ |= 0x00000004; - serverCurrentTime_ = value; - onChanged(); - return this; - } - public Builder clearServerCurrentTime() { - bitField0_ = (bitField0_ & ~0x00000004); - serverCurrentTime_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RegionServerStartupRequest) - } - - static { - defaultInstance = new RegionServerStartupRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionServerStartupRequest) - } - - public interface RegionServerStartupResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .NameStringPair mapEntries = 1; - java.util.List - getMapEntriesList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index); - int getMapEntriesCount(); - java.util.List - getMapEntriesOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( - int index); - } - public static final class RegionServerStartupResponse extends - com.google.protobuf.GeneratedMessage - implements RegionServerStartupResponseOrBuilder { - // Use RegionServerStartupResponse.newBuilder() to construct. 
- private RegionServerStartupResponse(Builder builder) { - super(builder); - } - private RegionServerStartupResponse(boolean noInit) {} - - private static final RegionServerStartupResponse defaultInstance; - public static RegionServerStartupResponse getDefaultInstance() { - return defaultInstance; - } - - public RegionServerStartupResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable; - } - - // repeated .NameStringPair mapEntries = 1; - public static final int MAPENTRIES_FIELD_NUMBER = 1; - private java.util.List mapEntries_; - public java.util.List getMapEntriesList() { - return mapEntries_; - } - public java.util.List - getMapEntriesOrBuilderList() { - return mapEntries_; - } - public int getMapEntriesCount() { - return mapEntries_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { - return mapEntries_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( - int index) { - return mapEntries_.get(index); - } - - private void initFields() { - mapEntries_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getMapEntriesCount(); i++) { - if (!getMapEntries(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < mapEntries_.size(); i++) { - output.writeMessage(1, mapEntries_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < mapEntries_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, mapEntries_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) obj; - - boolean result = true; - result = result && getMapEntriesList() - .equals(other.getMapEntriesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - 
return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getMapEntriesCount() > 0) { - hash = (37 * hash) + MAPENTRIES_FIELD_NUMBER; - hash = (53 * hash) + getMapEntriesList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder 
newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMapEntriesFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (mapEntriesBuilder_ == null) { - mapEntries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - mapEntriesBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildPartial() { - 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(this); - int from_bitField0_ = bitField0_; - if (mapEntriesBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.mapEntries_ = mapEntries_; - } else { - result.mapEntries_ = mapEntriesBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()) return this; - if (mapEntriesBuilder_ == null) { - if (!other.mapEntries_.isEmpty()) { - if (mapEntries_.isEmpty()) { - mapEntries_ = other.mapEntries_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureMapEntriesIsMutable(); - mapEntries_.addAll(other.mapEntries_); - } - onChanged(); - } - } else { - if (!other.mapEntries_.isEmpty()) { - if (mapEntriesBuilder_.isEmpty()) { - mapEntriesBuilder_.dispose(); - mapEntriesBuilder_ = null; - mapEntries_ = other.mapEntries_; - bitField0_ = (bitField0_ & ~0x00000001); - mapEntriesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getMapEntriesFieldBuilder() : null; - } else { - mapEntriesBuilder_.addAllMessages(other.mapEntries_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getMapEntriesCount(); i++) { - if (!getMapEntries(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addMapEntries(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .NameStringPair mapEntries = 1; - private java.util.List mapEntries_ = - java.util.Collections.emptyList(); - private void ensureMapEntriesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - mapEntries_ = new java.util.ArrayList(mapEntries_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_; - - public java.util.List getMapEntriesList() { - if (mapEntriesBuilder_ == null) { - return java.util.Collections.unmodifiableList(mapEntries_); - } else { - return mapEntriesBuilder_.getMessageList(); - } - } - public int getMapEntriesCount() { - if (mapEntriesBuilder_ == null) { - return mapEntries_.size(); - } else { - return mapEntriesBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { - if (mapEntriesBuilder_ == null) { - return mapEntries_.get(index); - } else { - return mapEntriesBuilder_.getMessage(index); - } - } - public Builder setMapEntries( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { - if (mapEntriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMapEntriesIsMutable(); - mapEntries_.set(index, value); - onChanged(); - } else { - mapEntriesBuilder_.setMessage(index, value); - } - return this; - } - public Builder setMapEntries( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { - if (mapEntriesBuilder_ == null) { - ensureMapEntriesIsMutable(); - mapEntries_.set(index, builderForValue.build()); - onChanged(); - } else { - mapEntriesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addMapEntries(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { - if (mapEntriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - 
ensureMapEntriesIsMutable(); - mapEntries_.add(value); - onChanged(); - } else { - mapEntriesBuilder_.addMessage(value); - } - return this; - } - public Builder addMapEntries( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { - if (mapEntriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMapEntriesIsMutable(); - mapEntries_.add(index, value); - onChanged(); - } else { - mapEntriesBuilder_.addMessage(index, value); - } - return this; - } - public Builder addMapEntries( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { - if (mapEntriesBuilder_ == null) { - ensureMapEntriesIsMutable(); - mapEntries_.add(builderForValue.build()); - onChanged(); - } else { - mapEntriesBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addMapEntries( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { - if (mapEntriesBuilder_ == null) { - ensureMapEntriesIsMutable(); - mapEntries_.add(index, builderForValue.build()); - onChanged(); - } else { - mapEntriesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllMapEntries( - java.lang.Iterable values) { - if (mapEntriesBuilder_ == null) { - ensureMapEntriesIsMutable(); - super.addAll(values, mapEntries_); - onChanged(); - } else { - mapEntriesBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearMapEntries() { - if (mapEntriesBuilder_ == null) { - mapEntries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - mapEntriesBuilder_.clear(); - } - return this; - } - public Builder removeMapEntries(int index) { - if (mapEntriesBuilder_ == null) { - ensureMapEntriesIsMutable(); - mapEntries_.remove(index); - onChanged(); - } else { - mapEntriesBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder( - int index) { - return getMapEntriesFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( - int index) { - if (mapEntriesBuilder_ == null) { - return mapEntries_.get(index); } else { - return mapEntriesBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getMapEntriesOrBuilderList() { - if (mapEntriesBuilder_ != null) { - return mapEntriesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(mapEntries_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() { - return getMapEntriesFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder( - int index) { - return getMapEntriesFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); - } - public java.util.List - getMapEntriesBuilderList() { - return getMapEntriesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> - getMapEntriesFieldBuilder() { - if (mapEntriesBuilder_ == null) { - mapEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( - mapEntries_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - mapEntries_ = null; - } - return mapEntriesBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RegionServerStartupResponse) - } - - static { - defaultInstance = new RegionServerStartupResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionServerStartupResponse) - } - - public interface RegionServerReportRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName server = 1; - boolean hasServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - - // optional .ServerLoad load = 2; - boolean hasLoad(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder(); - } - public static final class RegionServerReportRequest extends - com.google.protobuf.GeneratedMessage - implements RegionServerReportRequestOrBuilder { - // Use RegionServerReportRequest.newBuilder() to construct. - private RegionServerReportRequest(Builder builder) { - super(builder); - } - private RegionServerReportRequest(boolean noInit) {} - - private static final RegionServerReportRequest defaultInstance; - public static RegionServerReportRequest getDefaultInstance() { - return defaultInstance; - } - - public RegionServerReportRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName server = 1; - public static final int SERVER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; - } - - // optional .ServerLoad load = 2; - public static final int LOAD_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_; - public boolean hasLoad() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() { - return load_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() { - return load_; - } - 
- private void initFields() { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServer()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServer().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (hasLoad()) { - if (!getLoad().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, load_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, load_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) obj; - - boolean result = true; - result = result && (hasServer() == other.hasServer()); - if (hasServer()) { - result = result && getServer() - .equals(other.getServer()); - } - result = result && (hasLoad() == other.hasLoad()); - if (hasLoad()) { - result = result && getLoad() - .equals(other.getLoad()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServer()) { - hash = (37 * hash) + SERVER_FIELD_NUMBER; - hash = (53 * hash) + getServer().hashCode(); - } - if (hasLoad()) { - hash = (37 * hash) + LOAD_FIELD_NUMBER; - hash = (53 * hash) + getLoad().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( - com.google.protobuf.ByteString data, 
- com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - 
getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerFieldBuilder(); - getLoadFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (loadBuilder_ == null) { - load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - } else { - loadBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverBuilder_ == null) { - result.server_ = server_; - } else { - result.server_ = serverBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (loadBuilder_ == null) { - result.load_ = load_; - } else { - result.load_ = 
loadBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance()) return this; - if (other.hasServer()) { - mergeServer(other.getServer()); - } - if (other.hasLoad()) { - mergeLoad(other.getLoad()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServer()) { - - return false; - } - if (!getServer().isInitialized()) { - - return false; - } - if (hasLoad()) { - if (!getLoad().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(); - if (hasLoad()) { - subBuilder.mergeFrom(getLoad()); - } - input.readMessage(subBuilder, extensionRegistry); - setLoad(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName server = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - if (serverBuilder_ == null) { - return server_; - } else { - return serverBuilder_.getMessage(); - } - } - public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - server_ = 
value; - onChanged(); - } else { - serverBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServer( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverBuilder_ == null) { - server_ = builderForValue.build(); - onChanged(); - } else { - serverBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - server_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); - } else { - server_ = value; - } - onChanged(); - } else { - serverBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServer() { - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - if (serverBuilder_ != null) { - return serverBuilder_.getMessageOrBuilder(); - } else { - return server_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerFieldBuilder() { - if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, - getParentForChildren(), - isClean()); - server_ = null; - } - return serverBuilder_; - } - - // optional .ServerLoad load = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> loadBuilder_; - public boolean hasLoad() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() { - if (loadBuilder_ == null) { - return load_; - } else { - return loadBuilder_.getMessage(); - } - } - public Builder setLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { - if (loadBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - load_ = value; - onChanged(); - } else { - loadBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setLoad( - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) { - if (loadBuilder_ == null) { - load_ = builderForValue.build(); - onChanged(); - } else { - loadBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) { - if (loadBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - load_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) { - load_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(load_).mergeFrom(value).buildPartial(); - } else { - load_ = value; - } - onChanged(); - } else { - loadBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearLoad() { - if (loadBuilder_ == null) { - load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance(); - onChanged(); - } else { - loadBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getLoadBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getLoadFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() { - if (loadBuilder_ != null) { - return loadBuilder_.getMessageOrBuilder(); - } else { - return load_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> - getLoadFieldBuilder() { - if (loadBuilder_ == null) { - loadBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder>( - load_, - getParentForChildren(), - isClean()); - load_ = null; - } - return loadBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RegionServerReportRequest) - } - - static { - defaultInstance = new RegionServerReportRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionServerReportRequest) - } - - public interface RegionServerReportResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class RegionServerReportResponse extends - com.google.protobuf.GeneratedMessage - implements RegionServerReportResponseOrBuilder { - // Use RegionServerReportResponse.newBuilder() to construct. 
- private RegionServerReportResponse(Builder builder) { - super(builder); - } - private RegionServerReportResponse(boolean noInit) {} - - private static final RegionServerReportResponse defaultInstance; - public static RegionServerReportResponse getDefaultInstance() { - return defaultInstance; - } - - public RegionServerReportResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportResponse_fieldAccessorTable; - } - - // Construct using 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - 
extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:RegionServerReportResponse) - } - - static { - defaultInstance = new RegionServerReportResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionServerReportResponse) - } - - public interface ReportRSFatalErrorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName server = 1; - boolean hasServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - - // required string errorMessage = 2; - boolean hasErrorMessage(); - String getErrorMessage(); - } - public static final class ReportRSFatalErrorRequest extends - com.google.protobuf.GeneratedMessage - implements ReportRSFatalErrorRequestOrBuilder { - // Use ReportRSFatalErrorRequest.newBuilder() to construct. - private ReportRSFatalErrorRequest(Builder builder) { - super(builder); - } - private ReportRSFatalErrorRequest(boolean noInit) {} - - private static final ReportRSFatalErrorRequest defaultInstance; - public static ReportRSFatalErrorRequest getDefaultInstance() { - return defaultInstance; - } - - public ReportRSFatalErrorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName server = 1; - public static final int SERVER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; - } - - // required string errorMessage = 2; - public static final int ERRORMESSAGE_FIELD_NUMBER = 2; - private java.lang.Object errorMessage_; - public boolean hasErrorMessage() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getErrorMessage() { - java.lang.Object ref = errorMessage_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - errorMessage_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getErrorMessageBytes() { - java.lang.Object ref = errorMessage_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - errorMessage_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - errorMessage_ = ""; - } - private byte 
memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServer()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasErrorMessage()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServer().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getErrorMessageBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getErrorMessageBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) obj; - - boolean result = true; - result = result && (hasServer() == other.hasServer()); - if (hasServer()) { - result = result && getServer() - .equals(other.getServer()); - } - result = result && (hasErrorMessage() == other.hasErrorMessage()); - if (hasErrorMessage()) { - result = result && getErrorMessage() - .equals(other.getErrorMessage()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServer()) { - hash = (37 * hash) + SERVER_FIELD_NUMBER; - hash = (53 * hash) + getServer().hashCode(); - } - if (hasErrorMessage()) { - hash = (37 * hash) + ERRORMESSAGE_FIELD_NUMBER; - hash = (53 * hash) + getErrorMessage().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - 
.buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - errorMessage_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverBuilder_ == null) { - result.server_ = server_; - } else { - result.server_ = serverBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.errorMessage_ = errorMessage_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)other); - } 
else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance()) return this; - if (other.hasServer()) { - mergeServer(other.getServer()); - } - if (other.hasErrorMessage()) { - setErrorMessage(other.getErrorMessage()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServer()) { - - return false; - } - if (!hasErrorMessage()) { - - return false; - } - if (!getServer().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - errorMessage_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName server = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - if (serverBuilder_ == null) { - return server_; - } else { - return serverBuilder_.getMessage(); - } - } - public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - server_ = value; - onChanged(); - } else { - serverBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServer( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverBuilder_ == null) { - server_ = builderForValue.build(); - onChanged(); - } else { - serverBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - server_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - server_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); - } else { - server_ = value; - } - onChanged(); - } else { - serverBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServer() { - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - if (serverBuilder_ != null) { - return serverBuilder_.getMessageOrBuilder(); - } else { - return server_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerFieldBuilder() { - if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, - getParentForChildren(), - isClean()); - server_ = null; - } - return serverBuilder_; - } - - // required string errorMessage = 2; - private java.lang.Object errorMessage_ = ""; - public boolean hasErrorMessage() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getErrorMessage() { - java.lang.Object ref = errorMessage_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - errorMessage_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setErrorMessage(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - errorMessage_ = value; - onChanged(); - return this; - } - public Builder clearErrorMessage() { - bitField0_ = (bitField0_ & ~0x00000002); - errorMessage_ = getDefaultInstance().getErrorMessage(); - onChanged(); - return this; - } - void setErrorMessage(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - errorMessage_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:ReportRSFatalErrorRequest) - } - - static { - defaultInstance = new ReportRSFatalErrorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReportRSFatalErrorRequest) - } - - public interface ReportRSFatalErrorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class ReportRSFatalErrorResponse extends - com.google.protobuf.GeneratedMessage - implements ReportRSFatalErrorResponseOrBuilder { - // Use ReportRSFatalErrorResponse.newBuilder() to construct. 
- private ReportRSFatalErrorResponse(Builder builder) { - super(builder); - } - private ReportRSFatalErrorResponse(boolean noInit) {} - - private static final ReportRSFatalErrorResponse defaultInstance; - public static ReportRSFatalErrorResponse getDefaultInstance() { - return defaultInstance; - } - - public ReportRSFatalErrorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; - } - - // Construct using 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - 
extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ReportRSFatalErrorResponse) - } - - static { - defaultInstance = new ReportRSFatalErrorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReportRSFatalErrorResponse) - } - - public interface GetLastFlushedSequenceIdRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes regionName = 1; - boolean hasRegionName(); - com.google.protobuf.ByteString getRegionName(); - } - public static final class GetLastFlushedSequenceIdRequest extends - com.google.protobuf.GeneratedMessage - implements GetLastFlushedSequenceIdRequestOrBuilder { - // Use GetLastFlushedSequenceIdRequest.newBuilder() to construct. - private GetLastFlushedSequenceIdRequest(Builder builder) { - super(builder); - } - private GetLastFlushedSequenceIdRequest(boolean noInit) {} - - private static final GetLastFlushedSequenceIdRequest defaultInstance; - public static GetLastFlushedSequenceIdRequest getDefaultInstance() { - return defaultInstance; - } - - public GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; - } - - private int bitField0_; - // required bytes regionName = 1; - public static final int REGIONNAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString regionName_; - public boolean hasRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - - private void initFields() { - regionName_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, regionName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, regionName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) obj; - - boolean result = true; - result = result && (hasRegionName() == other.hasRegionName()); - if (hasRegionName()) { - result = result && getRegionName() - .equals(other.getRegionName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionName()) { - hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getRegionName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } 
- } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - regionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.regionName_ = regionName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance()) return this; - if (other.hasRegionName()) { - setRegionName(other.getRegionName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - regionName_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes regionName = 1; - private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - public Builder setRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - regionName_ = value; - onChanged(); - return this; - } - public Builder clearRegionName() { - bitField0_ = (bitField0_ & ~0x00000001); - regionName_ = getDefaultInstance().getRegionName(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetLastFlushedSequenceIdRequest) - } - - static { - 
defaultInstance = new GetLastFlushedSequenceIdRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetLastFlushedSequenceIdRequest) - } - - public interface GetLastFlushedSequenceIdResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 lastFlushedSequenceId = 1; - boolean hasLastFlushedSequenceId(); - long getLastFlushedSequenceId(); - } - public static final class GetLastFlushedSequenceIdResponse extends - com.google.protobuf.GeneratedMessage - implements GetLastFlushedSequenceIdResponseOrBuilder { - // Use GetLastFlushedSequenceIdResponse.newBuilder() to construct. - private GetLastFlushedSequenceIdResponse(Builder builder) { - super(builder); - } - private GetLastFlushedSequenceIdResponse(boolean noInit) {} - - private static final GetLastFlushedSequenceIdResponse defaultInstance; - public static GetLastFlushedSequenceIdResponse getDefaultInstance() { - return defaultInstance; - } - - public GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 lastFlushedSequenceId = 1; - public static final int LASTFLUSHEDSEQUENCEID_FIELD_NUMBER = 1; - private long lastFlushedSequenceId_; - public boolean hasLastFlushedSequenceId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLastFlushedSequenceId() { - return lastFlushedSequenceId_; - } - - private void initFields() { - lastFlushedSequenceId_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLastFlushedSequenceId()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, lastFlushedSequenceId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, lastFlushedSequenceId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) obj; - - boolean result = true; - result = result && (hasLastFlushedSequenceId() == other.hasLastFlushedSequenceId()); - if (hasLastFlushedSequenceId()) { - result = result && (getLastFlushedSequenceId() - == other.getLastFlushedSequenceId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLastFlushedSequenceId()) { - hash = (37 * hash) + LASTFLUSHEDSEQUENCEID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastFlushedSequenceId()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lastFlushedSequenceId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildParsed() 
- throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lastFlushedSequenceId_ = lastFlushedSequenceId_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()) return this; - if (other.hasLastFlushedSequenceId()) { - setLastFlushedSequenceId(other.getLastFlushedSequenceId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLastFlushedSequenceId()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lastFlushedSequenceId_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 lastFlushedSequenceId = 1; - private long lastFlushedSequenceId_ ; - public boolean hasLastFlushedSequenceId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLastFlushedSequenceId() { - return lastFlushedSequenceId_; - } - public Builder setLastFlushedSequenceId(long value) { - bitField0_ |= 0x00000001; - lastFlushedSequenceId_ = value; - onChanged(); - return this; - } - public Builder clearLastFlushedSequenceId() { - bitField0_ = (bitField0_ & ~0x00000001); - lastFlushedSequenceId_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetLastFlushedSequenceIdResponse) - } - - static { - defaultInstance = new GetLastFlushedSequenceIdResponse(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:GetLastFlushedSequenceIdResponse) - } - - public static abstract class RegionServerStatusService - implements com.google.protobuf.Service { - protected RegionServerStatusService() {} - - public interface Interface { - public abstract void regionServerStartup( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void regionServerReport( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void reportRSFatalError( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getLastFlushedSequenceId( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new RegionServerStatusService() { - @java.lang.Override - public void regionServerStartup( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, - com.google.protobuf.RpcCallback done) { - impl.regionServerStartup(controller, request, done); - } - - @java.lang.Override - public void regionServerReport( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, - com.google.protobuf.RpcCallback done) { - impl.regionServerReport(controller, request, done); - } - - @java.lang.Override - public void reportRSFatalError( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, - com.google.protobuf.RpcCallback done) { - impl.reportRSFatalError(controller, request, done); - } - - @java.lang.Override - public void getLastFlushedSequenceId( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, - com.google.protobuf.RpcCallback done) { - impl.getLastFlushedSequenceId(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.regionServerStartup(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)request); - case 1: - return impl.regionServerReport(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)request); - case 2: - return impl.reportRSFatalError(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)request); - case 3: - return impl.getLastFlushedSequenceId(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void regionServerStartup( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void regionServerReport( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void reportRSFatalError( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getLastFlushedSequenceId( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.regionServerStartup(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.regionServerReport(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.reportRSFatalError(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.getLastFlushedSequenceId(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(); - case 2: - return 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void regionServerStartup( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance())); - } - - public void regionServerReport( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance())); - } - - public void reportRSFatalError( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance())); - } - - public void getLastFlushedSequenceId( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse regionServerStartup( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse regionServerReport( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse reportRSFatalError( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getLastFlushedSequenceId( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionServerStartupRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionServerStartupRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionServerStartupResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionServerStartupResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionServerReportRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionServerReportRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionServerReportResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionServerReportResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReportRSFatalErrorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReportRSFatalErrorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReportRSFatalErrorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReportRSFatalErrorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetLastFlushedSequenceIdRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetLastFlushedSequenceIdResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\030RegionServerStatus.proto\032\013hbase.proto\"" + - 
"^\n\032RegionServerStartupRequest\022\014\n\004port\030\001 " + - "\002(\r\022\027\n\017serverStartCode\030\002 \002(\004\022\031\n\021serverCu" + - "rrentTime\030\003 \002(\004\"B\n\033RegionServerStartupRe" + - "sponse\022#\n\nmapEntries\030\001 \003(\0132\017.NameStringP" + - "air\"S\n\031RegionServerReportRequest\022\033\n\006serv" + - "er\030\001 \002(\0132\013.ServerName\022\031\n\004load\030\002 \001(\0132\013.Se" + - "rverLoad\"\034\n\032RegionServerReportResponse\"N" + - "\n\031ReportRSFatalErrorRequest\022\033\n\006server\030\001 " + - "\002(\0132\013.ServerName\022\024\n\014errorMessage\030\002 \002(\t\"\034", - "\n\032ReportRSFatalErrorResponse\"5\n\037GetLastF" + - "lushedSequenceIdRequest\022\022\n\nregionName\030\001 " + - "\002(\014\"A\n GetLastFlushedSequenceIdResponse\022" + - "\035\n\025lastFlushedSequenceId\030\001 \002(\0042\354\002\n\031Regio" + - "nServerStatusService\022P\n\023regionServerStar" + - "tup\022\033.RegionServerStartupRequest\032\034.Regio" + - "nServerStartupResponse\022M\n\022regionServerRe" + - "port\022\032.RegionServerReportRequest\032\033.Regio" + - "nServerReportResponse\022M\n\022reportRSFatalEr" + - "ror\022\032.ReportRSFatalErrorRequest\032\033.Report", - "RSFatalErrorResponse\022_\n\030getLastFlushedSe" + - "quenceId\022 .GetLastFlushedSequenceIdReque" + - "st\032!.GetLastFlushedSequenceIdResponseBN\n" + - "*org.apache.hadoop.hbase.protobuf.genera" + - "tedB\030RegionServerStatusProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_RegionServerStartupRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_RegionServerStartupRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionServerStartupRequest_descriptor, - new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); - internal_static_RegionServerStartupResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_RegionServerStartupResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionServerStartupResponse_descriptor, - new java.lang.String[] { "MapEntries", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); - internal_static_RegionServerReportRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_RegionServerReportRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionServerReportRequest_descriptor, - new java.lang.String[] { "Server", "Load", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class); - internal_static_RegionServerReportResponse_descriptor = - 
getDescriptor().getMessageTypes().get(3); - internal_static_RegionServerReportResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionServerReportResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class); - internal_static_ReportRSFatalErrorRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_ReportRSFatalErrorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReportRSFatalErrorRequest_descriptor, - new java.lang.String[] { "Server", "ErrorMessage", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); - internal_static_ReportRSFatalErrorResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_ReportRSFatalErrorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReportRSFatalErrorResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class); - internal_static_GetLastFlushedSequenceIdRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_GetLastFlushedSequenceIdRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetLastFlushedSequenceIdRequest_descriptor, - new java.lang.String[] { "RegionName", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class); - internal_static_GetLastFlushedSequenceIdResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_GetLastFlushedSequenceIdResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetLastFlushedSequenceIdResponse_descriptor, - new java.lang.String[] { "LastFlushedSequenceId", }, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java deleted file mode 100644 index e634f17..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java +++ /dev/null @@ -1,490 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: Tracing.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class Tracing { - private Tracing() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface RPCTInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional int64 traceId = 1; - boolean hasTraceId(); - long getTraceId(); - - // optional int64 parentId = 2; - boolean hasParentId(); - long getParentId(); - } - public static final class RPCTInfo extends - com.google.protobuf.GeneratedMessage - implements RPCTInfoOrBuilder { - // Use RPCTInfo.newBuilder() to construct. - private RPCTInfo(Builder builder) { - super(builder); - } - private RPCTInfo(boolean noInit) {} - - private static final RPCTInfo defaultInstance; - public static RPCTInfo getDefaultInstance() { - return defaultInstance; - } - - public RPCTInfo getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable; - } - - private int bitField0_; - // optional int64 traceId = 1; - public static final int TRACEID_FIELD_NUMBER = 1; - private long traceId_; - public boolean hasTraceId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getTraceId() { - return traceId_; - } - - // optional int64 parentId = 2; - public static final int PARENTID_FIELD_NUMBER = 2; - private long parentId_; - public boolean hasParentId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getParentId() { - return parentId_; - } - - private void initFields() { - traceId_ = 0L; - parentId_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, traceId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeInt64(2, parentId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, traceId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(2, parentId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj; - - boolean result = true; - result = result && (hasTraceId() == other.hasTraceId()); - if (hasTraceId()) { - result = result && (getTraceId() - == other.getTraceId()); - } - result = result && (hasParentId() == other.hasParentId()); - if (hasParentId()) { - result = result && (getParentId() - == other.getParentId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTraceId()) { - hash = (37 * hash) + TRACEID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTraceId()); - } - if (hasParentId()) { - hash = (37 * hash) + PARENTID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getParentId()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - traceId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - parentId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.traceId_ = traceId_; - 
if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.parentId_ = parentId_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this; - if (other.hasTraceId()) { - setTraceId(other.getTraceId()); - } - if (other.hasParentId()) { - setParentId(other.getParentId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - traceId_ = input.readInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - parentId_ = input.readInt64(); - break; - } - } - } - } - - private int bitField0_; - - // optional int64 traceId = 1; - private long traceId_ ; - public boolean hasTraceId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getTraceId() { - return traceId_; - } - public Builder setTraceId(long value) { - bitField0_ |= 0x00000001; - traceId_ = value; - onChanged(); - return this; - } - public Builder clearTraceId() { - bitField0_ = (bitField0_ & ~0x00000001); - traceId_ = 0L; - onChanged(); - return this; - } - - // optional int64 parentId = 2; - private long parentId_ ; - public boolean hasParentId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getParentId() { - return parentId_; - } - public Builder setParentId(long value) { - bitField0_ |= 0x00000002; - parentId_ = value; - onChanged(); - return this; - } - public Builder clearParentId() { - bitField0_ = (bitField0_ & ~0x00000002); - parentId_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RPCTInfo) - } - - static { - defaultInstance = new RPCTInfo(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RPCTInfo) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RPCTInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RPCTInfo_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\rTracing.proto\"-\n\010RPCTInfo\022\017\n\007traceId\030\001" + - " \001(\003\022\020\n\010parentId\030\002 \001(\003B:\n*org.apache.had" 
+ - "oop.hbase.protobuf.generatedB\007TracingH\001\240" + - "\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_RPCTInfo_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_RPCTInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RPCTInfo_descriptor, - new java.lang.String[] { "TraceId", "ParentId", }, - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java deleted file mode 100644 index 93999c6..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java +++ /dev/null @@ -1,5082 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ZooKeeper.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class ZooKeeperProtos { - private ZooKeeperProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface RootRegionServerOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName server = 1; - boolean hasServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - } - public static final class RootRegionServer extends - com.google.protobuf.GeneratedMessage - implements RootRegionServerOrBuilder { - // Use RootRegionServer.newBuilder() to construct. 
- private RootRegionServer(Builder builder) { - super(builder); - } - private RootRegionServer(boolean noInit) {} - - private static final RootRegionServer defaultInstance; - public static RootRegionServer getDefaultInstance() { - return defaultInstance; - } - - public RootRegionServer getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName server = 1; - public static final int SERVER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; - } - - private void initFields() { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServer()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServer().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) obj; - - boolean result = true; - result = result && (hasServer() == other.hasServer()); - if (hasServer()) { - result = result && getServer() - .equals(other.getServer()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServer()) { - hash = (37 * hash) + 
SERVER_FIELD_NUMBER; - hash = (53 * hash) + getServer().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServerOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RootRegionServer_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverBuilder_ == null) { - result.server_ = server_; - } else { - result.server_ = serverBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.getDefaultInstance()) return this; - if (other.hasServer()) { - mergeServer(other.getServer()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServer()) { - - return false; - } - if (!getServer().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServer()) { - subBuilder.mergeFrom(getServer()); - } - input.readMessage(subBuilder, extensionRegistry); - setServer(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName server = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; - public boolean hasServer() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { - if (serverBuilder_ == null) { - return server_; - } else { - return serverBuilder_.getMessage(); - } - } - public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - server_ = value; - onChanged(); - } else { - serverBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServer( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverBuilder_ == null) { - server_ = builderForValue.build(); - onChanged(); - } else { - serverBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - server_ = - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); - } else { - server_ = value; - } - onChanged(); - } else { - serverBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServer() { - if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - if (serverBuilder_ != null) { - return serverBuilder_.getMessageOrBuilder(); - } else { - return server_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerFieldBuilder() { - if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, - getParentForChildren(), - isClean()); - server_ = null; - } - return serverBuilder_; - } - - // @@protoc_insertion_point(builder_scope:RootRegionServer) - } - - static { - defaultInstance = new RootRegionServer(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RootRegionServer) - } - - public interface MasterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName master = 1; - boolean hasMaster(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); - } - public static final class Master extends - com.google.protobuf.GeneratedMessage - implements MasterOrBuilder { - // Use Master.newBuilder() to construct. 
- private Master(Builder builder) { - super(builder); - } - private Master(boolean noInit) {} - - private static final Master defaultInstance; - public static Master getDefaultInstance() { - return defaultInstance; - } - - public Master getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName master = 1; - public static final int MASTER_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_; - public boolean hasMaster() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { - return master_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { - return master_; - } - - private void initFields() { - master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasMaster()) { - memoizedIsInitialized = 0; - return false; - } - if (!getMaster().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, master_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, master_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) obj; - - boolean result = true; - result = result && (hasMaster() == other.hasMaster()); - if (hasMaster()) { - result = result && getMaster() - .equals(other.getMaster()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMaster()) { - hash = (37 * hash) + MASTER_FIELD_NUMBER; - hash = (53 * hash) + getMaster().hashCode(); - } - hash = (29 * hash) + 
getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MasterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMasterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - masterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (masterBuilder_ == null) { - result.master_ = master_; - } else { - result.master_ = masterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance()) return this; - if (other.hasMaster()) { - 
mergeMaster(other.getMaster()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasMaster()) { - - return false; - } - if (!getMaster().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasMaster()) { - subBuilder.mergeFrom(getMaster()); - } - input.readMessage(subBuilder, extensionRegistry); - setMaster(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName master = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; - public boolean hasMaster() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() { - if (masterBuilder_ == null) { - return master_; - } else { - return masterBuilder_.getMessage(); - } - } - public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (masterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - master_ = value; - onChanged(); - } else { - masterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setMaster( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (masterBuilder_ == null) { - master_ = builderForValue.build(); - onChanged(); - } else { - masterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (masterBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - master_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - master_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); - } else { - master_ = value; - } - onChanged(); - } else { - masterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearMaster() { - if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - masterBuilder_.clear(); - } - bitField0_ = 
(bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getMasterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { - if (masterBuilder_ != null) { - return masterBuilder_.getMessageOrBuilder(); - } else { - return master_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getMasterFieldBuilder() { - if (masterBuilder_ == null) { - masterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - master_, - getParentForChildren(), - isClean()); - master_ = null; - } - return masterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Master) - } - - static { - defaultInstance = new Master(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Master) - } - - public interface ClusterUpOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string startDate = 1; - boolean hasStartDate(); - String getStartDate(); - } - public static final class ClusterUp extends - com.google.protobuf.GeneratedMessage - implements ClusterUpOrBuilder { - // Use ClusterUp.newBuilder() to construct. - private ClusterUp(Builder builder) { - super(builder); - } - private ClusterUp(boolean noInit) {} - - private static final ClusterUp defaultInstance; - public static ClusterUp getDefaultInstance() { - return defaultInstance; - } - - public ClusterUp getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable; - } - - private int bitField0_; - // required string startDate = 1; - public static final int STARTDATE_FIELD_NUMBER = 1; - private java.lang.Object startDate_; - public boolean hasStartDate() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getStartDate() { - java.lang.Object ref = startDate_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - startDate_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getStartDateBytes() { - java.lang.Object ref = startDate_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - startDate_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - startDate_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = 
memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasStartDate()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getStartDateBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getStartDateBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) obj; - - boolean result = true; - result = result && (hasStartDate() == other.hasStartDate()); - if (hasStartDate()) { - result = result && getStartDate() - .equals(other.getStartDate()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasStartDate()) { - hash = (37 * hash) + STARTDATE_FIELD_NUMBER; - hash = (53 * hash) + getStartDate().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( - java.io.InputStream input, 
- com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - startDate_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.startDate_ = startDate_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance()) return this; - if (other.hasStartDate()) { - setStartDate(other.getStartDate()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasStartDate()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - startDate_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string startDate = 1; - private java.lang.Object startDate_ = ""; - public boolean hasStartDate() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getStartDate() { - java.lang.Object ref = startDate_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - startDate_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setStartDate(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - startDate_ = value; - onChanged(); - return this; - } - public Builder clearStartDate() { - bitField0_ = (bitField0_ & 
~0x00000001); - startDate_ = getDefaultInstance().getStartDate(); - onChanged(); - return this; - } - void setStartDate(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - startDate_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:ClusterUp) - } - - static { - defaultInstance = new ClusterUp(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ClusterUp) - } - - public interface RegionTransitionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint32 eventTypeCode = 1; - boolean hasEventTypeCode(); - int getEventTypeCode(); - - // required bytes regionName = 2; - boolean hasRegionName(); - com.google.protobuf.ByteString getRegionName(); - - // required uint64 createTime = 3; - boolean hasCreateTime(); - long getCreateTime(); - - // required .ServerName serverName = 4; - boolean hasServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - - // optional bytes payload = 5; - boolean hasPayload(); - com.google.protobuf.ByteString getPayload(); - } - public static final class RegionTransition extends - com.google.protobuf.GeneratedMessage - implements RegionTransitionOrBuilder { - // Use RegionTransition.newBuilder() to construct. - private RegionTransition(Builder builder) { - super(builder); - } - private RegionTransition(boolean noInit) {} - - private static final RegionTransition defaultInstance; - public static RegionTransition getDefaultInstance() { - return defaultInstance; - } - - public RegionTransition getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable; - } - - private int bitField0_; - // required uint32 eventTypeCode = 1; - public static final int EVENTTYPECODE_FIELD_NUMBER = 1; - private int eventTypeCode_; - public boolean hasEventTypeCode() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getEventTypeCode() { - return eventTypeCode_; - } - - // required bytes regionName = 2; - public static final int REGIONNAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString regionName_; - public boolean hasRegionName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - - // required uint64 createTime = 3; - public static final int CREATETIME_FIELD_NUMBER = 3; - private long createTime_; - public boolean hasCreateTime() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getCreateTime() { - return createTime_; - } - - // required .ServerName serverName = 4; - public static final int SERVERNAME_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; - } - - // optional bytes payload = 5; - public static final int PAYLOAD_FIELD_NUMBER = 5; - private com.google.protobuf.ByteString payload_; - public boolean hasPayload() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public com.google.protobuf.ByteString getPayload() { - return payload_; - } - - private void initFields() { - eventTypeCode_ = 0; - regionName_ = com.google.protobuf.ByteString.EMPTY; - createTime_ = 0L; - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - payload_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasEventTypeCode()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRegionName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCreateTime()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasServerName()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServerName().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, eventTypeCode_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, regionName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, createTime_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, serverName_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBytes(5, payload_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, eventTypeCode_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, regionName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, createTime_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, serverName_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, payload_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other = 
(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) obj; - - boolean result = true; - result = result && (hasEventTypeCode() == other.hasEventTypeCode()); - if (hasEventTypeCode()) { - result = result && (getEventTypeCode() - == other.getEventTypeCode()); - } - result = result && (hasRegionName() == other.hasRegionName()); - if (hasRegionName()) { - result = result && getRegionName() - .equals(other.getRegionName()); - } - result = result && (hasCreateTime() == other.hasCreateTime()); - if (hasCreateTime()) { - result = result && (getCreateTime() - == other.getCreateTime()); - } - result = result && (hasServerName() == other.hasServerName()); - if (hasServerName()) { - result = result && getServerName() - .equals(other.getServerName()); - } - result = result && (hasPayload() == other.hasPayload()); - if (hasPayload()) { - result = result && getPayload() - .equals(other.getPayload()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEventTypeCode()) { - hash = (37 * hash) + EVENTTYPECODE_FIELD_NUMBER; - hash = (53 * hash) + getEventTypeCode(); - } - if (hasRegionName()) { - hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getRegionName().hashCode(); - } - if (hasCreateTime()) { - hash = (37 * hash) + CREATETIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCreateTime()); - } - if (hasServerName()) { - hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; - hash = (53 * hash) + getServerName().hashCode(); - } - if (hasPayload()) { - hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; - hash = (53 * hash) + getPayload().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransitionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerNameFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - eventTypeCode_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - regionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - createTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000008); - payload_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public 
Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.eventTypeCode_ = eventTypeCode_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.regionName_ = regionName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.createTime_ = createTime_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - if (serverNameBuilder_ == null) { - result.serverName_ = serverName_; - } else { - result.serverName_ = serverNameBuilder_.build(); - } - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.payload_ = payload_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance()) return this; - if (other.hasEventTypeCode()) { - setEventTypeCode(other.getEventTypeCode()); - } - if (other.hasRegionName()) { - setRegionName(other.getRegionName()); - } - if (other.hasCreateTime()) { - setCreateTime(other.getCreateTime()); - } - if (other.hasServerName()) { - mergeServerName(other.getServerName()); - } - if (other.hasPayload()) { - setPayload(other.getPayload()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasEventTypeCode()) { - - return false; - } - if (!hasRegionName()) { - - return false; - } - if (!hasCreateTime()) { - - return 
false; - } - if (!hasServerName()) { - - return false; - } - if (!getServerName().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - eventTypeCode_ = input.readUInt32(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - regionName_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - createTime_ = input.readUInt64(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } - case 42: { - bitField0_ |= 0x00000010; - payload_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required uint32 eventTypeCode = 1; - private int eventTypeCode_ ; - public boolean hasEventTypeCode() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getEventTypeCode() { - return eventTypeCode_; - } - public Builder setEventTypeCode(int value) { - bitField0_ |= 0x00000001; - eventTypeCode_ = value; - onChanged(); - return this; - } - public Builder clearEventTypeCode() { - bitField0_ = (bitField0_ & ~0x00000001); - eventTypeCode_ = 0; - onChanged(); - return this; - } - - // required bytes regionName = 2; - private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRegionName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - public Builder setRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - regionName_ = value; - onChanged(); - return this; - } - public Builder clearRegionName() { - bitField0_ = (bitField0_ & ~0x00000002); - regionName_ = getDefaultInstance().getRegionName(); - onChanged(); - return this; - } - - // required uint64 createTime = 3; - private long createTime_ ; - public boolean hasCreateTime() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getCreateTime() { - return createTime_; - } - public Builder setCreateTime(long value) { - bitField0_ |= 0x00000004; - createTime_ = value; - onChanged(); - return this; - } - public Builder clearCreateTime() { - bitField0_ = (bitField0_ & ~0x00000004); - createTime_ = 0L; - onChanged(); - return this; - } - - // required .ServerName serverName = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - if (serverNameBuilder_ == null) { - return serverName_; - } else { - return serverNameBuilder_.getMessage(); - } - } - public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - serverName_ = value; - onChanged(); - } else { - serverNameBuilder_.setMessage(value); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder setServerName( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverNameBuilder_ == null) { - serverName_ = builderForValue.build(); - onChanged(); - } else { - serverNameBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008) && - serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - serverName_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); - } else { - serverName_ = value; - } - onChanged(); - } else { - serverNameBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000008; - return this; - } - public Builder clearServerName() { - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { - bitField0_ |= 0x00000008; - onChanged(); - return getServerNameFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - if (serverNameBuilder_ != null) { - return serverNameBuilder_.getMessageOrBuilder(); - } else { - return serverName_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerNameFieldBuilder() { - if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, - getParentForChildren(), - isClean()); - serverName_ = null; - } - return serverNameBuilder_; - } - - // optional bytes payload = 5; - private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasPayload() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public 
com.google.protobuf.ByteString getPayload() { - return payload_; - } - public Builder setPayload(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000010; - payload_ = value; - onChanged(); - return this; - } - public Builder clearPayload() { - bitField0_ = (bitField0_ & ~0x00000010); - payload_ = getDefaultInstance().getPayload(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RegionTransition) - } - - static { - defaultInstance = new RegionTransition(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RegionTransition) - } - - public interface SplitLogTaskOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .SplitLogTask.State state = 1; - boolean hasState(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState(); - - // required .ServerName serverName = 2; - boolean hasServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - } - public static final class SplitLogTask extends - com.google.protobuf.GeneratedMessage - implements SplitLogTaskOrBuilder { - // Use SplitLogTask.newBuilder() to construct. - private SplitLogTask(Builder builder) { - super(builder); - } - private SplitLogTask(boolean noInit) {} - - private static final SplitLogTask defaultInstance; - public static SplitLogTask getDefaultInstance() { - return defaultInstance; - } - - public SplitLogTask getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable; - } - - public enum State - implements com.google.protobuf.ProtocolMessageEnum { - UNASSIGNED(0, 0), - OWNED(1, 1), - RESIGNED(2, 2), - DONE(3, 3), - ERR(4, 4), - ; - - public static final int UNASSIGNED_VALUE = 0; - public static final int OWNED_VALUE = 1; - public static final int RESIGNED_VALUE = 2; - public static final int DONE_VALUE = 3; - public static final int ERR_VALUE = 4; - - - public final int getNumber() { return value; } - - public static State valueOf(int value) { - switch (value) { - case 0: return UNASSIGNED; - case 1: return OWNED; - case 2: return RESIGNED; - case 3: return DONE; - case 4: return ERR; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public State findValueByNumber(int number) { - return State.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(0); - } - - private static final State[] VALUES = { - UNASSIGNED, OWNED, RESIGNED, DONE, ERR, - }; - - public static State valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private State(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:SplitLogTask.State) - } - - private int bitField0_; - // required .SplitLogTask.State state = 1; - public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_; - public boolean hasState() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { - return state_; - } - - // required .ServerName serverName = 2; - public static final int SERVERNAME_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; - } - - private void initFields() { - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasState()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasServerName()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServerName().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, serverName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, serverName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if 
(obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) obj; - - boolean result = true; - result = result && (hasState() == other.hasState()); - if (hasState()) { - result = result && - (getState() == other.getState()); - } - result = result && (hasServerName() == other.hasServerName()); - if (hasServerName()) { - result = result && getServerName() - .equals(other.getServerName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasState()) { - hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); - } - if (hasServerName()) { - hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; - hash = (53 * hash) + getServerName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - 
} - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerNameFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; - bitField0_ = (bitField0_ & ~0x00000001); - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.state_ = state_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (serverNameBuilder_ == null) { - result.serverName_ = serverName_; - } else { - result.serverName_ = serverNameBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance()) return this; - if (other.hasState()) { - setState(other.getState()); - } - if (other.hasServerName()) { - mergeServerName(other.getServerName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasState()) { - - return false; - } - if (!hasServerName()) { - - return false; - } - if (!getServerName().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - state_ = value; - } - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .SplitLogTask.State state = 1; - private 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; - public boolean hasState() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { - return state_; - } - public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - state_ = value; - onChanged(); - return this; - } - public Builder clearState() { - bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; - onChanged(); - return this; - } - - // required .ServerName serverName = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - if (serverNameBuilder_ == null) { - return serverName_; - } else { - return serverNameBuilder_.getMessage(); - } - } - public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - serverName_ = value; - onChanged(); - } else { - serverNameBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setServerName( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverNameBuilder_ == null) { - serverName_ = builderForValue.build(); - onChanged(); - } else { - serverNameBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - serverName_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); - } else { - serverName_ = value; - } - onChanged(); - } else { - serverNameBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearServerName() { - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getServerNameFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder 
getServerNameOrBuilder() { - if (serverNameBuilder_ != null) { - return serverNameBuilder_.getMessageOrBuilder(); - } else { - return serverName_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerNameFieldBuilder() { - if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, - getParentForChildren(), - isClean()); - serverName_ = null; - } - return serverNameBuilder_; - } - - // @@protoc_insertion_point(builder_scope:SplitLogTask) - } - - static { - defaultInstance = new SplitLogTask(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SplitLogTask) - } - - public interface TableOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .Table.State state = 1 [default = ENABLED]; - boolean hasState(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState(); - } - public static final class Table extends - com.google.protobuf.GeneratedMessage - implements TableOrBuilder { - // Use Table.newBuilder() to construct. - private Table(Builder builder) { - super(builder); - } - private Table(boolean noInit) {} - - private static final Table defaultInstance; - public static Table getDefaultInstance() { - return defaultInstance; - } - - public Table getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable; - } - - public enum State - implements com.google.protobuf.ProtocolMessageEnum { - ENABLED(0, 0), - DISABLED(1, 1), - DISABLING(2, 2), - ENABLING(3, 3), - ; - - public static final int ENABLED_VALUE = 0; - public static final int DISABLED_VALUE = 1; - public static final int DISABLING_VALUE = 2; - public static final int ENABLING_VALUE = 3; - - - public final int getNumber() { return value; } - - public static State valueOf(int value) { - switch (value) { - case 0: return ENABLED; - case 1: return DISABLED; - case 2: return DISABLING; - case 3: return ENABLING; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public State findValueByNumber(int number) { - return State.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor().getEnumTypes().get(0); - } - - private static final State[] VALUES = { - ENABLED, DISABLED, DISABLING, ENABLING, - }; - - public static State valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private State(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Table.State) - } - - private int bitField0_; - // required .Table.State state = 1 [default = ENABLED]; - public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_; - public boolean hasState() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() { - return state_; - } - - private void initFields() { - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasState()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) obj; - - boolean result = true; - result = result && (hasState() == other.hasState()); - if (hasState()) { - result = result && - (getState() == other.getState()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasState()) { - hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.state_ = state_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance()) return this; - if (other.hasState()) { - setState(other.getState()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasState()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - state_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required .Table.State state = 1 [default = ENABLED]; - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; - public boolean hasState() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() { - return state_; - } - public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - state_ = value; - onChanged(); - return this; - } - public Builder clearState() { - bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Table) - } - - static { - defaultInstance = new Table(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Table) - } - - public interface ReplicationPeerOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string clusterkey = 1; - boolean hasClusterkey(); - String getClusterkey(); - } - public static final class ReplicationPeer extends - com.google.protobuf.GeneratedMessage - implements ReplicationPeerOrBuilder { - // Use ReplicationPeer.newBuilder() to construct. 
- private ReplicationPeer(Builder builder) { - super(builder); - } - private ReplicationPeer(boolean noInit) {} - - private static final ReplicationPeer defaultInstance; - public static ReplicationPeer getDefaultInstance() { - return defaultInstance; - } - - public ReplicationPeer getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable; - } - - private int bitField0_; - // required string clusterkey = 1; - public static final int CLUSTERKEY_FIELD_NUMBER = 1; - private java.lang.Object clusterkey_; - public boolean hasClusterkey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getClusterkey() { - java.lang.Object ref = clusterkey_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - clusterkey_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getClusterkeyBytes() { - java.lang.Object ref = clusterkey_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - clusterkey_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - clusterkey_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasClusterkey()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getClusterkeyBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getClusterkeyBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) obj; - - boolean result = true; - result = result && (hasClusterkey() == other.hasClusterkey()); - if (hasClusterkey()) { - result = result && getClusterkey() - .equals(other.getClusterkey()); - } - 
result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasClusterkey()) { - hash = (37 * hash) + CLUSTERKEY_FIELD_NUMBER; - hash = (53 * hash) + getClusterkey().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeerOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - clusterkey_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.clusterkey_ = clusterkey_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance()) return this; - if (other.hasClusterkey()) { - setClusterkey(other.getClusterkey()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasClusterkey()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - clusterkey_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string clusterkey = 1; - private java.lang.Object clusterkey_ = ""; - public boolean hasClusterkey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getClusterkey() { - java.lang.Object ref = clusterkey_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - clusterkey_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setClusterkey(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - clusterkey_ = value; - onChanged(); - return this; - } - public Builder clearClusterkey() { - bitField0_ = (bitField0_ & ~0x00000001); - clusterkey_ = getDefaultInstance().getClusterkey(); - onChanged(); - return this; - } - void setClusterkey(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - clusterkey_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:ReplicationPeer) - } - - static { - defaultInstance = new ReplicationPeer(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicationPeer) - } - - public interface ReplicationStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ReplicationState.State state = 1; - boolean hasState(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState(); - } - public static final class ReplicationState extends - com.google.protobuf.GeneratedMessage - implements ReplicationStateOrBuilder { - // Use ReplicationState.newBuilder() to construct. 
- private ReplicationState(Builder builder) { - super(builder); - } - private ReplicationState(boolean noInit) {} - - private static final ReplicationState defaultInstance; - public static ReplicationState getDefaultInstance() { - return defaultInstance; - } - - public ReplicationState getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable; - } - - public enum State - implements com.google.protobuf.ProtocolMessageEnum { - ENABLED(0, 0), - DISABLED(1, 1), - ; - - public static final int ENABLED_VALUE = 0; - public static final int DISABLED_VALUE = 1; - - - public final int getNumber() { return value; } - - public static State valueOf(int value) { - switch (value) { - case 0: return ENABLED; - case 1: return DISABLED; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public State findValueByNumber(int number) { - return State.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor().getEnumTypes().get(0); - } - - private static final State[] VALUES = { - ENABLED, DISABLED, - }; - - public static State valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private State(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:ReplicationState.State) - } - - private int bitField0_; - // required .ReplicationState.State state = 1; - public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_; - public boolean hasState() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { - return state_; - } - - private void initFields() { - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasState()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { 
- getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) obj; - - boolean result = true; - result = result && (hasState() == other.hasState()); - if (hasState()) { - result = result && - (getState() == other.getState()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasState()) { - hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState 
parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationStateOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.state_ = state_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance()) return this; - if (other.hasState()) { - setState(other.getState()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasState()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - bitField0_ |= 0x00000001; - state_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required .ReplicationState.State state = 1; - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; - public boolean hasState() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { - 
return state_; - } - public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - state_ = value; - onChanged(); - return this; - } - public Builder clearState() { - bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ReplicationState) - } - - static { - defaultInstance = new ReplicationState(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicationState) - } - - public interface ReplicationHLogPositionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int64 position = 1; - boolean hasPosition(); - long getPosition(); - } - public static final class ReplicationHLogPosition extends - com.google.protobuf.GeneratedMessage - implements ReplicationHLogPositionOrBuilder { - // Use ReplicationHLogPosition.newBuilder() to construct. - private ReplicationHLogPosition(Builder builder) { - super(builder); - } - private ReplicationHLogPosition(boolean noInit) {} - - private static final ReplicationHLogPosition defaultInstance; - public static ReplicationHLogPosition getDefaultInstance() { - return defaultInstance; - } - - public ReplicationHLogPosition getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable; - } - - private int bitField0_; - // required int64 position = 1; - public static final int POSITION_FIELD_NUMBER = 1; - private long position_; - public boolean hasPosition() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getPosition() { - return position_; - } - - private void initFields() { - position_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPosition()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, position_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, position_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return 
true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) obj; - - boolean result = true; - result = result && (hasPosition() == other.hasPosition()); - if (hasPosition()) { - result = result && (getPosition() - == other.getPosition()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPosition()) { - hash = (37 * hash) + POSITION_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getPosition()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( - com.google.protobuf.CodedInputStream input) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPositionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - position_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.position_ = position_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance()) return this; - if (other.hasPosition()) { - setPosition(other.getPosition()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasPosition()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - position_ = input.readInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required int64 position = 1; - private long position_ ; - public boolean hasPosition() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getPosition() { - return position_; - } - public Builder setPosition(long value) { - bitField0_ |= 0x00000001; - position_ = value; - onChanged(); - return this; - } - public Builder clearPosition() { - bitField0_ = (bitField0_ & ~0x00000001); - position_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ReplicationHLogPosition) - } - - static { - defaultInstance = new ReplicationHLogPosition(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicationHLogPosition) - } - - public interface ReplicationLockOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string lockOwner = 1; - boolean hasLockOwner(); - String getLockOwner(); - } - public static final class ReplicationLock extends - com.google.protobuf.GeneratedMessage - implements ReplicationLockOrBuilder { - // Use ReplicationLock.newBuilder() to construct. 
- private ReplicationLock(Builder builder) { - super(builder); - } - private ReplicationLock(boolean noInit) {} - - private static final ReplicationLock defaultInstance; - public static ReplicationLock getDefaultInstance() { - return defaultInstance; - } - - public ReplicationLock getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable; - } - - private int bitField0_; - // required string lockOwner = 1; - public static final int LOCKOWNER_FIELD_NUMBER = 1; - private java.lang.Object lockOwner_; - public boolean hasLockOwner() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getLockOwner() { - java.lang.Object ref = lockOwner_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - lockOwner_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getLockOwnerBytes() { - java.lang.Object ref = lockOwner_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - lockOwner_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - lockOwner_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLockOwner()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getLockOwnerBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getLockOwnerBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) obj; - - boolean result = true; - result = result && (hasLockOwner() == other.hasLockOwner()); - if (hasLockOwner()) { - result = result && getLockOwner() - .equals(other.getLockOwner()); - } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLockOwner()) { - hash = (37 * hash) + LOCKOWNER_FIELD_NUMBER; - hash = (53 * hash) + getLockOwner().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLockOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lockOwner_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock build() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lockOwner_ = lockOwner_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance()) return this; - if (other.hasLockOwner()) { - setLockOwner(other.getLockOwner()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLockOwner()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - lockOwner_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string lockOwner = 1; - private java.lang.Object lockOwner_ = ""; - public boolean hasLockOwner() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getLockOwner() { - java.lang.Object ref = lockOwner_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - lockOwner_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setLockOwner(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - lockOwner_ = value; - onChanged(); - return this; - } - public Builder clearLockOwner() { - bitField0_ = (bitField0_ & ~0x00000001); - lockOwner_ = getDefaultInstance().getLockOwner(); - onChanged(); - return this; - } - void setLockOwner(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - lockOwner_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:ReplicationLock) - } - - static { - defaultInstance = new ReplicationLock(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicationLock) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RootRegionServer_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RootRegionServer_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Master_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Master_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ClusterUp_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ClusterUp_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RegionTransition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RegionTransition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - 
internal_static_SplitLogTask_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SplitLogTask_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Table_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Table_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicationPeer_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicationPeer_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicationState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicationState_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicationHLogPosition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicationHLogPosition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicationLock_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicationLock_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\017ZooKeeper.proto\032\013hbase.proto\"/\n\020RootRe" + - "gionServer\022\033\n\006server\030\001 \002(\0132\013.ServerName\"" + - "%\n\006Master\022\033\n\006master\030\001 \002(\0132\013.ServerName\"\036" + - "\n\tClusterUp\022\021\n\tstartDate\030\001 \002(\t\"\203\001\n\020Regio" + - "nTransition\022\025\n\reventTypeCode\030\001 \002(\r\022\022\n\nre" + - "gionName\030\002 \002(\014\022\022\n\ncreateTime\030\003 \002(\004\022\037\n\nse" + - "rverName\030\004 \002(\0132\013.ServerName\022\017\n\007payload\030\005" + - " \001(\014\"\230\001\n\014SplitLogTask\022\"\n\005state\030\001 \002(\0162\023.S" + - "plitLogTask.State\022\037\n\nserverName\030\002 \002(\0132\013." 
+ - "ServerName\"C\n\005State\022\016\n\nUNASSIGNED\020\000\022\t\n\005O", - "WNED\020\001\022\014\n\010RESIGNED\020\002\022\010\n\004DONE\020\003\022\007\n\003ERR\020\004\"" + - "n\n\005Table\022$\n\005state\030\001 \002(\0162\014.Table.State:\007E" + - "NABLED\"?\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLED" + - "\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLING\020\003\"%\n\017Repli" + - "cationPeer\022\022\n\nclusterkey\030\001 \002(\t\"^\n\020Replic" + - "ationState\022&\n\005state\030\001 \002(\0162\027.ReplicationS" + - "tate.State\"\"\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISA" + - "BLED\020\001\"+\n\027ReplicationHLogPosition\022\020\n\010pos" + - "ition\030\001 \002(\003\"$\n\017ReplicationLock\022\021\n\tlockOw" + - "ner\030\001 \002(\tBE\n*org.apache.hadoop.hbase.pro", - "tobuf.generatedB\017ZooKeeperProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_RootRegionServer_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_RootRegionServer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RootRegionServer_descriptor, - new java.lang.String[] { "Server", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RootRegionServer.Builder.class); - internal_static_Master_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_Master_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Master_descriptor, - new java.lang.String[] { "Master", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class); - internal_static_ClusterUp_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_ClusterUp_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ClusterUp_descriptor, - new java.lang.String[] { "StartDate", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); - internal_static_RegionTransition_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_RegionTransition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionTransition_descriptor, - new java.lang.String[] { "EventTypeCode", "RegionName", "CreateTime", "ServerName", "Payload", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class); - internal_static_SplitLogTask_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_SplitLogTask_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SplitLogTask_descriptor, - new java.lang.String[] { "State", "ServerName", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, - 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); - internal_static_Table_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_Table_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Table_descriptor, - new java.lang.String[] { "State", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class); - internal_static_ReplicationPeer_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_ReplicationPeer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicationPeer_descriptor, - new java.lang.String[] { "Clusterkey", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class); - internal_static_ReplicationState_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_ReplicationState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicationState_descriptor, - new java.lang.String[] { "State", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class); - internal_static_ReplicationHLogPosition_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_ReplicationHLogPosition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicationHLogPosition_descriptor, - new java.lang.String[] { "Position", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class); - internal_static_ReplicationLock_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_ReplicationLock_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicationLock_descriptor, - new java.lang.String[] { "LockOwner", }, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, - org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git hbase-server/src/main/protobuf/AccessControl.proto hbase-server/src/main/protobuf/AccessControl.proto deleted file mode 100644 index ea77282..0000000 --- hbase-server/src/main/protobuf/AccessControl.proto +++ /dev/null @@ -1,99 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "AccessControlProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -message Permission { - enum Action { - READ = 0; - WRITE = 1; - EXEC = 2; - CREATE = 3; - ADMIN = 4; - } - repeated Action action = 1; - optional bytes table = 2; - optional bytes family = 3; - optional bytes qualifier = 4; -} - -message UserPermission { - required bytes user = 1; - required Permission permission = 2; -} - -/** - * Content of the /hbase/acl/

znode. - */ -message UserTablePermissions { - message UserPermissions { - required bytes user = 1; - repeated Permission permissions = 2; - } - - repeated UserPermissions permissions = 1; -} - -message GrantRequest { - required UserPermission permission = 1; -} - -message GrantResponse { -} - -message RevokeRequest { - required UserPermission permission = 1; - -} - -message RevokeResponse { -} - - -message UserPermissionsRequest { - required bytes table = 1; -} - -message UserPermissionsResponse { - repeated UserPermission permission = 1; -} - -message CheckPermissionsRequest { - repeated Permission permission = 1; -} - -message CheckPermissionsResponse { -} - -service AccessControlService { - rpc grant(GrantRequest) - returns (GrantResponse); - - rpc revoke(RevokeRequest) - returns (RevokeResponse); - - rpc getUserPermissions(UserPermissionsRequest) - returns (UserPermissionsResponse); - - rpc checkPermissions(CheckPermissionsRequest) - returns (CheckPermissionsResponse); -} diff --git hbase-server/src/main/protobuf/Admin.proto hbase-server/src/main/protobuf/Admin.proto deleted file mode 100644 index b7d8549..0000000 --- hbase-server/src/main/protobuf/Admin.proto +++ /dev/null @@ -1,255 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for Admin service. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "AdminProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; - -message GetRegionInfoRequest { - required RegionSpecifier region = 1; - optional bool compactionState = 2; -} - -message GetRegionInfoResponse { - required RegionInfo regionInfo = 1; - optional CompactionState compactionState = 2; - - enum CompactionState { - NONE = 0; - MINOR = 1; - MAJOR = 2; - MAJOR_AND_MINOR = 3; - } -} - -/** - * Get a list of store files for a set of column families in a particular region. - * If no column family is specified, get the store files for all column families. 
- */ -message GetStoreFileRequest { - required RegionSpecifier region = 1; - repeated bytes family = 2; -} - -message GetStoreFileResponse { - repeated string storeFile = 1; -} - -message GetOnlineRegionRequest { -} - -message GetOnlineRegionResponse { - repeated RegionInfo regionInfo = 1; -} - -message OpenRegionRequest { - repeated RegionOpenInfo openInfo = 1; - - message RegionOpenInfo { - required RegionInfo region = 1; - optional uint32 versionOfOfflineNode = 2; - } -} - -message OpenRegionResponse { - repeated RegionOpeningState openingState = 1; - - enum RegionOpeningState { - OPENED = 0; - ALREADY_OPENED = 1; - FAILED_OPENING = 2; - } -} - -/** - * Closes the specified region and will use or not use ZK during the close - * according to the specified flag. - */ -message CloseRegionRequest { - required RegionSpecifier region = 1; - optional uint32 versionOfClosingNode = 2; - optional bool transitionInZK = 3 [default = true]; - optional ServerName destinationServer = 4; -} - -message CloseRegionResponse { - required bool closed = 1; -} - -/** - * Flushes the MemStore of the specified region. - *

- * This method is synchronous. - */ -message FlushRegionRequest { - required RegionSpecifier region = 1; - optional uint64 ifOlderThanTs = 2; -} - -message FlushRegionResponse { - required uint64 lastFlushTime = 1; - optional bool flushed = 2; -} - -/** - * Splits the specified region. - *

- * This method currently flushes the region and then forces a compaction which - * will then trigger a split. The flush is done synchronously but the - * compaction is asynchronous. - */ -message SplitRegionRequest { - required RegionSpecifier region = 1; - optional bytes splitPoint = 2; -} - -message SplitRegionResponse { -} - -/** - * Compacts the specified region. Performs a major compaction if specified. - *

- * This method is asynchronous. - */ -message CompactRegionRequest { - required RegionSpecifier region = 1; - optional bool major = 2; - optional bytes family = 3; -} - -message CompactRegionResponse { -} - -message UUID { - required uint64 leastSigBits = 1; - required uint64 mostSigBits = 2; -} - -// Protocol buffer version of HLog -message WALEntry { - required WALKey key = 1; - required WALEdit edit = 2; - - // Protocol buffer version of HLogKey - message WALKey { - required bytes encodedRegionName = 1; - required bytes tableName = 2; - required uint64 logSequenceNumber = 3; - required uint64 writeTime = 4; - optional UUID clusterId = 5; - } - - message WALEdit { - repeated bytes keyValueBytes = 1; - repeated FamilyScope familyScope = 2; - - enum ScopeType { - REPLICATION_SCOPE_LOCAL = 0; - REPLICATION_SCOPE_GLOBAL = 1; - } - - message FamilyScope { - required bytes family = 1; - required ScopeType scopeType = 2; - } - } -} - -/** - * Replicates the given entries. The guarantee is that the given entries - * will be durable on the slave cluster if this method returns without - * any exception. - * hbase.replication has to be set to true for this to work. - */ -message ReplicateWALEntryRequest { - repeated WALEntry entry = 1; -} - -message ReplicateWALEntryResponse { -} - -message RollWALWriterRequest { -} - -message RollWALWriterResponse { - // A list of encoded name of regions to flush - repeated bytes regionToFlush = 1; -} - -message StopServerRequest { - required string reason = 1; -} - -message StopServerResponse { -} - -message GetServerInfoRequest { -} - -message ServerInfo { - required ServerName serverName = 1; - optional uint32 webuiPort = 2; -} - -message GetServerInfoResponse { - required ServerInfo serverInfo = 1; -} - -service AdminService { - rpc getRegionInfo(GetRegionInfoRequest) - returns(GetRegionInfoResponse); - - rpc getStoreFile(GetStoreFileRequest) - returns(GetStoreFileResponse); - - rpc getOnlineRegion(GetOnlineRegionRequest) - returns(GetOnlineRegionResponse); - - rpc openRegion(OpenRegionRequest) - returns(OpenRegionResponse); - - rpc closeRegion(CloseRegionRequest) - returns(CloseRegionResponse); - - rpc flushRegion(FlushRegionRequest) - returns(FlushRegionResponse); - - rpc splitRegion(SplitRegionRequest) - returns(SplitRegionResponse); - - rpc compactRegion(CompactRegionRequest) - returns(CompactRegionResponse); - - rpc replicateWALEntry(ReplicateWALEntryRequest) - returns(ReplicateWALEntryResponse); - - rpc rollWALWriter(RollWALWriterRequest) - returns(RollWALWriterResponse); - - rpc getServerInfo(GetServerInfoRequest) - returns(GetServerInfoResponse); - - rpc stopServer(StopServerRequest) - returns(StopServerResponse); -} diff --git hbase-server/src/main/protobuf/Aggregate.proto hbase-server/src/main/protobuf/Aggregate.proto deleted file mode 100644 index 5707e14..0000000 --- hbase-server/src/main/protobuf/Aggregate.proto +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "AggregateProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "Client.proto"; - -message AggregateArgument { - /** The argument passed to the AggregateService consists of three parts - * (1) the (canonical) classname of the ColumnInterpreter implementation - * (2) the Scan query - * (3) any bytes required to construct the ColumnInterpreter object - * properly - */ - required string interpreterClassName = 1; - required Scan scan = 2; - optional bytes interpreterSpecificBytes = 3; -} - -message AggregateResponse { - /** - * The AggregateService methods all have a response that either is a Pair - * or a simple object. When it is a Pair both firstPart and secondPart - * have defined values (and the secondPart is not present in the response - * when the response is not a pair). Refer to the AggregateImplementation - * class for an overview of the AggregateResponse object constructions. - */ - repeated bytes firstPart = 1; - optional bytes secondPart = 2; -} - -/** Refer to the AggregateImplementation class for an overview of the - * AggregateService method implementations and their functionality. - */ -service AggregateService { - rpc getMax (AggregateArgument) returns (AggregateResponse); - rpc getMin (AggregateArgument) returns (AggregateResponse); - rpc getSum (AggregateArgument) returns (AggregateResponse); - rpc getRowNum (AggregateArgument) returns (AggregateResponse); - rpc getAvg (AggregateArgument) returns (AggregateResponse); - rpc getStd (AggregateArgument) returns (AggregateResponse); - rpc getMedian (AggregateArgument) returns (AggregateResponse); -} \ No newline at end of file diff --git hbase-server/src/main/protobuf/BulkDelete.proto hbase-server/src/main/protobuf/BulkDelete.proto deleted file mode 100644 index a0e56dd..0000000 --- hbase-server/src/main/protobuf/BulkDelete.proto +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated"; -option java_outer_classname = "BulkDeleteProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "Client.proto"; - -message BulkDeleteRequest { - required Scan scan = 1; - required DeleteType deleteType = 2; - optional uint64 timestamp = 3; - required uint32 rowBatchSize = 4; - - enum DeleteType { - ROW = 0; - FAMILY = 1; - COLUMN = 2; - VERSION = 3; - } -} - -message BulkDeleteResponse { - required uint64 rowsDeleted = 1; - optional uint64 versionsDeleted = 2; -} - -service BulkDeleteService { - rpc delete(BulkDeleteRequest) - returns (BulkDeleteResponse); -} \ No newline at end of file diff --git hbase-server/src/main/protobuf/Client.proto hbase-server/src/main/protobuf/Client.proto deleted file mode 100644 index dd34998..0000000 --- hbase-server/src/main/protobuf/Client.proto +++ /dev/null @@ -1,385 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for Client service. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "ClientProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; -import "Comparator.proto"; - -/** - * Container for a list of column qualifier names of a family. - */ -message Column { - required bytes family = 1; - repeated bytes qualifier = 2; -} - -/** - * The protocol buffer version of Get - */ -message Get { - required bytes row = 1; - repeated Column column = 2; - repeated NameBytesPair attribute = 3; - optional uint64 lockId = 4; - optional Filter filter = 5; - optional TimeRange timeRange = 6; - optional uint32 maxVersions = 7 [default = 1]; - optional bool cacheBlocks = 8 [default = true]; - optional uint32 storeLimit = 9; - optional uint32 storeOffset = 10; -} - -/** - * For performance reason, we don't use KeyValue - * here. We use the actual KeyValue bytes. - */ -message Result { - repeated bytes keyValueBytes = 1; -} - -/** - * The get request. Perform a single Get operation. - * Unless existenceOnly is specified, return all the requested data - * for the row that matches exactly, or the one that immediately - * precedes it if closestRowBefore is specified. - * - * If existenceOnly is set, only the existence will be returned. - */ -message GetRequest { - required RegionSpecifier region = 1; - required Get get = 2; - - // If the row to get doesn't exist, return the - // closest row before. - optional bool closestRowBefore = 3; - - // The result isn't asked for, just check for - // the existence. 
If specified, closestRowBefore - // will be ignored - optional bool existenceOnly = 4; -} - -message GetResponse { - optional Result result = 1; - - // used for Get to check existence only - optional bool exists = 2; -} - -/** - * Condition to check if the value of a given cell (row, - * family, qualifier) matches a value via a given comparator. - * - * Condition is used in check and mutate operations. - */ -message Condition { - required bytes row = 1; - required bytes family = 2; - required bytes qualifier = 3; - required CompareType compareType = 4; - required Comparator comparator = 5; -} - -/** - * A specific mutate inside a mutate request. - * It can be an append, increment, put or delete based - * on the mutate type. - */ -message Mutate { - required bytes row = 1; - required MutateType mutateType = 2; - repeated ColumnValue columnValue = 3; - repeated NameBytesPair attribute = 4; - optional uint64 timestamp = 5; - optional uint64 lockId = 6; - optional bool writeToWAL = 7 [default = true]; - - // For some mutate, result may be returned, in which case, - // time range can be specified for potential performance gain - optional TimeRange timeRange = 10; - - enum MutateType { - APPEND = 0; - INCREMENT = 1; - PUT = 2; - DELETE = 3; - } - - enum DeleteType { - DELETE_ONE_VERSION = 0; - DELETE_MULTIPLE_VERSIONS = 1; - DELETE_FAMILY = 2; - } - - message ColumnValue { - required bytes family = 1; - repeated QualifierValue qualifierValue = 2; - - message QualifierValue { - optional bytes qualifier = 1; - optional bytes value = 2; - optional uint64 timestamp = 3; - optional DeleteType deleteType = 4; - } - } -} - -/** - * The mutate request. Perform a single Mutate operation. - * - * Optionally, you can specify a condition. The mutate - * will take place only if the condition is met. Otherwise, - * the mutate will be ignored. In the response result, - * parameter processed is used to indicate if the mutate - * actually happened. - */ -message MutateRequest { - required RegionSpecifier region = 1; - required Mutate mutate = 2; - optional Condition condition = 3; -} - -message MutateResponse { - optional Result result = 1; - - // used for mutate to indicate processed only - optional bool processed = 2; -} - -/** - * Instead of get from a table, you can scan it with optional filters. - * You can specify the row key range, time range, the columns/families - * to scan and so on. - * - * This scan is used the first time in a scan request. The response of - * the initial scan will return a scanner id, which should be used to - * fetch result batches later on before it is closed. - */ -message Scan { - repeated Column column = 1; - repeated NameBytesPair attribute = 2; - optional bytes startRow = 3; - optional bytes stopRow = 4; - optional Filter filter = 5; - optional TimeRange timeRange = 6; - optional uint32 maxVersions = 7 [default = 1]; - optional bool cacheBlocks = 8 [default = true]; - optional uint32 batchSize = 9; - optional uint64 maxResultSize = 10; - optional uint32 storeLimit = 11; - optional uint32 storeOffset = 12; -} - -/** - * A scan request. Initially, it should specify a scan. Later on, you - * can use the scanner id returned to fetch result batches with a different - * scan request. - * - * The scanner will remain open if there are more results, and it's not - * asked to be closed explicitly. - * - * You can fetch the results and ask the scanner to be closed to save - * a trip if you are not interested in remaining results. 
- */ -message ScanRequest { - optional RegionSpecifier region = 1; - optional Scan scan = 2; - optional uint64 scannerId = 3; - optional uint32 numberOfRows = 4; - optional bool closeScanner = 5; - optional uint64 nextCallSeq = 6; -} - -/** - * The scan response. If there are no more results, moreResults will - * be false. If it is not specified, it means there are more. - */ -message ScanResponse { - repeated Result result = 1; - optional uint64 scannerId = 2; - optional bool moreResults = 3; - optional uint32 ttl = 4; -} - -message LockRowRequest { - required RegionSpecifier region = 1; - repeated bytes row = 2; -} - -message LockRowResponse { - required uint64 lockId = 1; - optional uint32 ttl = 2; -} - -message UnlockRowRequest { - required RegionSpecifier region = 1; - required uint64 lockId = 2; -} - -message UnlockRowResponse { -} - -/** - * Atomically bulk load multiple HFiles (say from different column families) - * into an open region. - */ -message BulkLoadHFileRequest { - required RegionSpecifier region = 1; - repeated FamilyPath familyPath = 2; - optional bool assignSeqNum = 3; - - message FamilyPath { - required bytes family = 1; - required string path = 2; - } -} - -message BulkLoadHFileResponse { - required bool loaded = 1; -} - -/** - * An individual coprocessor call. You must specify the protocol, - * the method, and the row to which the call will be executed. - * - * You can specify the configuration settings in the property list. - * - * The parameter list has the parameters used for the method. - * A parameter is a pair of parameter name and the binary parameter - * value. The name is the parameter class name. The value is the - * binary format of the parameter, for example, protocol buffer - * encoded value. - */ -message Exec { - required bytes row = 1; - required string protocolName = 2; - required string methodName = 3; - repeated NameStringPair property = 4; - repeated NameBytesPair parameter = 5; -} - - /** - * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol} - * method using the registered protocol handlers. - * {@link CoprocessorProtocol} implementations must be registered via the - * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol( - * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)} - * method before they are available. - */ -message ExecCoprocessorRequest { - required RegionSpecifier region = 1; - required Exec call = 2; -} - -message ExecCoprocessorResponse { - required NameBytesPair value = 1; -} - -message CoprocessorServiceCall { - required bytes row = 1; - required string serviceName = 2; - required string methodName = 3; - required bytes request = 4; -} - -message CoprocessorServiceRequest { - required RegionSpecifier region = 1; - required CoprocessorServiceCall call = 2; -} - -message CoprocessorServiceResponse { - required RegionSpecifier region = 1; - required NameBytesPair value = 2; -} - -/** - * An action that is part of MultiRequest. - * This is a union type - exactly one of the fields will be set. - */ -message MultiAction { - optional Mutate mutate = 1; - optional Get get = 2; - optional Exec exec = 3; -} - -/** - * An individual action result. The result will in the - * same order as the action in the request. If an action - * returns a value, it is set in value field. If it doesn't - * return anything, the result will be empty. If an action - * fails to execute due to any exception, the exception - * is returned as a stringified parameter. 
- */ -message ActionResult { - optional NameBytesPair value = 1; - optional NameBytesPair exception = 2; -} - -/** - * You can execute a list of actions on a given region in order. - * - * If it is a list of mutate actions, atomic can be set - * to make sure they can be processed atomically, just like - * RowMutations. - */ -message MultiRequest { - required RegionSpecifier region = 1; - repeated MultiAction action = 2; - optional bool atomic = 3; -} - -message MultiResponse { - repeated ActionResult result = 1; -} - - -service ClientService { - rpc get(GetRequest) - returns(GetResponse); - - rpc mutate(MutateRequest) - returns(MutateResponse); - - rpc scan(ScanRequest) - returns(ScanResponse); - - rpc lockRow(LockRowRequest) - returns(LockRowResponse); - - rpc unlockRow(UnlockRowRequest) - returns(UnlockRowResponse); - - rpc bulkLoadHFile(BulkLoadHFileRequest) - returns(BulkLoadHFileResponse); - - rpc execCoprocessor(ExecCoprocessorRequest) - returns(ExecCoprocessorResponse); - - rpc execService(CoprocessorServiceRequest) - returns(CoprocessorServiceResponse); - - rpc multi(MultiRequest) - returns(MultiResponse); -} diff --git hbase-server/src/main/protobuf/ClusterId.proto hbase-server/src/main/protobuf/ClusterId.proto deleted file mode 100644 index 80b7d0f..0000000 --- hbase-server/src/main/protobuf/ClusterId.proto +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are shared throughout HBase - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "ClusterIdProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -/** - * Content of the '/hbase/hbaseid', cluster id, znode. - * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file. - */ -message ClusterId { - // This is the cluster id, a uuid as a String - required string clusterId = 1; -} diff --git hbase-server/src/main/protobuf/ClusterStatus.proto hbase-server/src/main/protobuf/ClusterStatus.proto deleted file mode 100644 index 73f20f5..0000000 --- hbase-server/src/main/protobuf/ClusterStatus.proto +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for ClustStatus - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "ClusterStatusProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; -import "ClusterId.proto"; -import "FS.proto"; - -message RegionState { - required RegionInfo regionInfo = 1; - required State state = 2; - optional uint64 stamp = 3; - enum State { - OFFLINE = 0; // region is in an offline state - PENDING_OPEN = 1; // sent rpc to server to open but has not begun - OPENING = 2; // server has begun to open but not yet done - OPEN = 3; // server opened region and updated meta - PENDING_CLOSE = 4; // sent rpc to server to close but has not begun - CLOSING = 5; // server has begun to close but not yet done - CLOSED = 6; // server closed region and updated meta - SPLITTING = 7; // server started split of a region - SPLIT = 8; // server completed split of a region - } -} - -message RegionInTransition { - required RegionSpecifier spec = 1; - required RegionState regionState = 2; -} - -message LiveServerInfo { - required ServerName server = 1; - required ServerLoad serverLoad = 2; -} - -message ClusterStatus { - optional HBaseVersionFileContent hbaseVersion = 1; - repeated LiveServerInfo liveServers = 2; - repeated ServerName deadServers = 3; - repeated RegionInTransition regionsInTransition = 4; - optional ClusterId clusterId = 5; - repeated Coprocessor masterCoprocessors = 6; - optional ServerName master = 7; - repeated ServerName backupMasters = 8; - optional bool balancerOn = 9; -} diff --git hbase-server/src/main/protobuf/Comparator.proto hbase-server/src/main/protobuf/Comparator.proto deleted file mode 100644 index d4c1682..0000000 --- hbase-server/src/main/protobuf/Comparator.proto +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for filters - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "ComparatorProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -// This file contains protocol buffers that are used for comparators (e.g. 
in filters) - -message Comparator { - required string name = 1; - optional bytes serializedComparator = 2; -} - -message ByteArrayComparable { - optional bytes value = 1; -} - -message BinaryComparator { - required ByteArrayComparable comparable = 1; -} - -message BinaryPrefixComparator { - required ByteArrayComparable comparable = 1; -} - -message BitComparator { - required ByteArrayComparable comparable = 1; - required BitwiseOp bitwiseOp = 2; - - enum BitwiseOp { - AND = 1; - OR = 2; - XOR = 3; - } -} - -message NullComparator { -} - -message RegexStringComparator { - required string pattern = 1; - required int32 patternFlags = 2; - required string charset = 3; -} - -message SubstringComparator { - required string substr = 1; -} diff --git hbase-server/src/main/protobuf/Examples.proto hbase-server/src/main/protobuf/Examples.proto deleted file mode 100644 index 24fbad6..0000000 --- hbase-server/src/main/protobuf/Examples.proto +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated"; -option java_outer_classname = "ExampleProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -message CountRequest { -} - -message CountResponse { - required int64 count = 1 [default = 0]; -} - -service RowCountService { - rpc getRowCount(CountRequest) - returns (CountResponse); - rpc getKeyValueCount(CountRequest) - returns (CountResponse); -} \ No newline at end of file diff --git hbase-server/src/main/protobuf/FS.proto hbase-server/src/main/protobuf/FS.proto deleted file mode 100644 index ca819e3..0000000 --- hbase-server/src/main/protobuf/FS.proto +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// This file contains protocol buffers that are written into the filesystem - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "FSProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -/** - * The ${HBASE_ROOTDIR}/hbase.version file content - */ -message HBaseVersionFileContent { - required string version = 1; -} - -/** - * Reference file content used when we split an hfile under a region. - */ -message Reference { - required bytes splitkey = 1; - enum Range { - TOP = 0; - BOTTOM = 1; - } - required Range range = 2; -} - diff --git hbase-server/src/main/protobuf/Filter.proto hbase-server/src/main/protobuf/Filter.proto deleted file mode 100644 index de79106..0000000 --- hbase-server/src/main/protobuf/Filter.proto +++ /dev/null @@ -1,152 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for filters - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "FilterProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; -import "Comparator.proto"; - -message ColumnCountGetFilter { - required int32 limit = 1; -} - -message ColumnPaginationFilter { - required int32 limit = 1; - optional int32 offset = 2; -} - -message ColumnPrefixFilter { - required bytes prefix = 1; -} - -message ColumnRangeFilter { - optional bytes minColumn = 1; - optional bool minColumnInclusive = 2; - optional bytes maxColumn = 3; - optional bool maxColumnInclusive = 4; -} - -message CompareFilter { - required CompareType compareOp = 1; - optional Comparator comparator = 2; -} - -message DependentColumnFilter { - required CompareFilter compareFilter = 1; - optional bytes columnFamily = 2; - optional bytes columnQualifier = 3; - optional bool dropDependentColumn = 4; -} - -message FamilyFilter { - required CompareFilter compareFilter = 1; -} - -message FilterList { - required Operator operator = 1; - repeated Filter filters = 2; - - enum Operator { - MUST_PASS_ALL = 1; - MUST_PASS_ONE = 2; - } -} - -message FilterWrapper { - required Filter filter = 1; -} - -message FirstKeyOnlyFilter { -} - -message FirstKeyValueMatchingQualifiersFilter { - repeated bytes qualifiers = 1; -} - -message FuzzyRowFilter { - repeated BytesBytesPair fuzzyKeysData = 1; -} - -message InclusiveStopFilter { - optional bytes stopRowKey = 1; -} - -message KeyOnlyFilter { - required bool lenAsVal = 1; -} - -message MultipleColumnPrefixFilter { - repeated bytes sortedPrefixes = 1; -} - -message PageFilter { - required int64 pageSize = 1; -} - -message PrefixFilter { - optional bytes prefix = 1; -} - -message 
QualifierFilter { - required CompareFilter compareFilter = 1; -} - -message RandomRowFilter { - required float chance = 1; -} - -message RowFilter { - required CompareFilter compareFilter = 1; -} - -message SingleColumnValueExcludeFilter { - required SingleColumnValueFilter singleColumnValueFilter = 1; -} - -message SingleColumnValueFilter { - optional bytes columnFamily = 1; - optional bytes columnQualifier = 2; - required CompareType compareOp = 3; - required Comparator comparator = 4; - optional bool foundColumn = 5; - optional bool matchedColumn = 6; - optional bool filterIfMissing = 7; - optional bool latestVersionOnly = 8; -} - -message SkipFilter { - required Filter filter = 1; -} - -message TimestampsFilter { - repeated int64 timestamps = 1; -} - -message ValueFilter { - required CompareFilter compareFilter = 1; -} - -message WhileMatchFilter { - required Filter filter = 1; -} diff --git hbase-server/src/main/protobuf/LoadBalancer.proto hbase-server/src/main/protobuf/LoadBalancer.proto deleted file mode 100644 index df1c049..0000000 --- hbase-server/src/main/protobuf/LoadBalancer.proto +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers to represent the state of the load balancer. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "LoadBalancerProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -message LoadBalancerState { - optional bool balancerOn = 1; -} diff --git hbase-server/src/main/protobuf/Master.proto hbase-server/src/main/protobuf/Master.proto deleted file mode 100644 index e2ff35b..0000000 --- hbase-server/src/main/protobuf/Master.proto +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for protocols implemented by the master. 
- -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "MasterProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -message IsMasterRunningRequest { -} - -message IsMasterRunningResponse { - required bool isMasterRunning = 1; -} - -service MasterService { - /** return true if master is available */ - rpc isMasterRunning(IsMasterRunningRequest) - returns(IsMasterRunningResponse); -} diff --git hbase-server/src/main/protobuf/MasterAdmin.proto hbase-server/src/main/protobuf/MasterAdmin.proto deleted file mode 100644 index dc62bb4..0000000 --- hbase-server/src/main/protobuf/MasterAdmin.proto +++ /dev/null @@ -1,283 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for MasterAdminProtocol. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "MasterAdminProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; -import "Client.proto"; - -/* Column-level protobufs */ - -message AddColumnRequest { - required bytes tableName = 1; - required ColumnFamilySchema columnFamilies = 2; -} - -message AddColumnResponse { -} - -message DeleteColumnRequest { - required bytes tableName = 1; - required bytes columnName = 2; -} - -message DeleteColumnResponse { -} - -message ModifyColumnRequest { - required bytes tableName = 1; - required ColumnFamilySchema columnFamilies = 2; -} - -message ModifyColumnResponse { -} - -/* Region-level Protos */ - -message MoveRegionRequest { - required RegionSpecifier region = 1; - optional ServerName destServerName = 2; -} - -message MoveRegionResponse { -} - -message AssignRegionRequest { - required RegionSpecifier region = 1; -} - -message AssignRegionResponse { -} - -message UnassignRegionRequest { - required RegionSpecifier region = 1; - optional bool force = 2 [default = false]; -} - -message UnassignRegionResponse { -} - -message OfflineRegionRequest { - required RegionSpecifier region = 1; -} - -message OfflineRegionResponse { -} - -/* Table-level protobufs */ - -message CreateTableRequest { - required TableSchema tableSchema = 1; - repeated bytes splitKeys = 2; -} - -message CreateTableResponse { -} - -message DeleteTableRequest { - required bytes tableName = 1; -} - -message DeleteTableResponse { -} - -message EnableTableRequest { - required bytes tableName = 1; -} - -message EnableTableResponse { -} - -message DisableTableRequest { - required bytes tableName = 1; -} - -message DisableTableResponse { -} - -message ModifyTableRequest { - required bytes tableName = 1; - required TableSchema tableSchema 
= 2; -} - -message ModifyTableResponse { -} - -/* Cluster-level protobufs */ - - -message ShutdownRequest { -} - -message ShutdownResponse { -} - -message StopMasterRequest { -} - -message StopMasterResponse { -} - -message BalanceRequest { -} - -message BalanceResponse { - required bool balancerRan = 1; -} - -message SetBalancerRunningRequest { - required bool on = 1; - optional bool synchronous = 2; -} - -message SetBalancerRunningResponse { - optional bool prevBalanceValue = 1; -} - -message CatalogScanRequest { -} - -message CatalogScanResponse { - optional int32 scanResult = 1; -} - -message EnableCatalogJanitorRequest { - required bool enable = 1; -} - -message EnableCatalogJanitorResponse { - optional bool prevValue = 1; -} - -message IsCatalogJanitorEnabledRequest { -} - -message IsCatalogJanitorEnabledResponse { - required bool value = 1; -} - -service MasterAdminService { - /** Adds a column to the specified table. */ - rpc addColumn(AddColumnRequest) - returns(AddColumnResponse); - - /** Deletes a column from the specified table. Table must be disabled. */ - rpc deleteColumn(DeleteColumnRequest) - returns(DeleteColumnResponse); - - /** Modifies an existing column on the specified table. */ - rpc modifyColumn(ModifyColumnRequest) - returns(ModifyColumnResponse); - - /** Move the region region to the destination server. */ - rpc moveRegion(MoveRegionRequest) - returns(MoveRegionResponse); - - /** Assign a region to a server chosen at random. */ - rpc assignRegion(AssignRegionRequest) - returns(AssignRegionResponse); - - /** - * Unassign a region from current hosting regionserver. Region will then be - * assigned to a regionserver chosen at random. Region could be reassigned - * back to the same server. Use moveRegion if you want - * to control the region movement. - */ - rpc unassignRegion(UnassignRegionRequest) - returns(UnassignRegionResponse); - - /** - * Offline a region from the assignment manager's in-memory state. The - * region should be in a closed state and there will be no attempt to - * automatically reassign the region as in unassign. This is a special - * method, and should only be used by experts or hbck. - */ - rpc offlineRegion(OfflineRegionRequest) - returns(OfflineRegionResponse); - - /** Deletes a table */ - rpc deleteTable(DeleteTableRequest) - returns(DeleteTableResponse); - - /** Puts the table on-line (only needed if table has been previously taken offline) */ - rpc enableTable(EnableTableRequest) - returns(EnableTableResponse); - - /** Take table offline */ - rpc disableTable(DisableTableRequest) - returns(DisableTableResponse); - - /** Modify a table's metadata */ - rpc modifyTable(ModifyTableRequest) - returns(ModifyTableResponse); - - /** Creates a new table asynchronously */ - rpc createTable(CreateTableRequest) - returns(CreateTableResponse); - - /** Shutdown an HBase cluster. */ - rpc shutdown(ShutdownRequest) - returns(ShutdownResponse); - - /** Stop HBase Master only. Does not shutdown the cluster. */ - rpc stopMaster(StopMasterRequest) - returns(StopMasterResponse); - - /** - * Run the balancer. Will run the balancer and if regions to move, it will - * go ahead and do the reassignments. Can NOT run for various reasons. - * Check logs. - */ - rpc balance(BalanceRequest) - returns(BalanceResponse); - - /** - * Turn the load balancer on or off. - * If synchronous is true, it waits until current balance() call, if outstanding, to return. 
- */ - rpc setBalancerRunning(SetBalancerRunningRequest) - returns(SetBalancerRunningResponse); - - /** Get a run of the catalog janitor */ - rpc runCatalogScan(CatalogScanRequest) - returns(CatalogScanResponse); - - /** - * Enable the catalog janitor on or off. - */ - rpc enableCatalogJanitor(EnableCatalogJanitorRequest) - returns(EnableCatalogJanitorResponse); - - /** - * Query whether the catalog janitor is enabled. - */ - rpc isCatalogJanitorEnabled(IsCatalogJanitorEnabledRequest) - returns(IsCatalogJanitorEnabledResponse); - - /** - * Call a master coprocessor endpoint - */ - rpc execMasterService(CoprocessorServiceRequest) - returns(CoprocessorServiceResponse); -} diff --git hbase-server/src/main/protobuf/MasterMonitor.proto hbase-server/src/main/protobuf/MasterMonitor.proto deleted file mode 100644 index 38738e2..0000000 --- hbase-server/src/main/protobuf/MasterMonitor.proto +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for MasterMonitorProtocol. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "MasterMonitorProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; -import "ClusterStatus.proto"; - -message GetSchemaAlterStatusRequest { - required bytes tableName = 1; -} - -message GetSchemaAlterStatusResponse { - optional uint32 yetToUpdateRegions = 1; - optional uint32 totalRegions = 2; -} - -message GetTableDescriptorsRequest { - repeated string tableNames = 1; -} - -message GetTableDescriptorsResponse { - repeated TableSchema tableSchema = 1; -} - -message GetClusterStatusRequest { -} - -message GetClusterStatusResponse { - required ClusterStatus clusterStatus = 1; -} - -service MasterMonitorService { - /** Used by the client to get the number of regions that have received the updated schema */ - rpc getSchemaAlterStatus(GetSchemaAlterStatusRequest) - returns(GetSchemaAlterStatusResponse); - - /** Get list of TableDescriptors for requested tables. */ - rpc getTableDescriptors(GetTableDescriptorsRequest) - returns(GetTableDescriptorsResponse); - - /** Return cluster status. */ - rpc getClusterStatus(GetClusterStatusRequest) - returns(GetClusterStatusResponse); -} diff --git hbase-server/src/main/protobuf/MultiRowMutation.proto hbase-server/src/main/protobuf/MultiRowMutation.proto deleted file mode 100644 index 1aaefee..0000000 --- hbase-server/src/main/protobuf/MultiRowMutation.proto +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import "Client.proto"; -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "MultiRowMutation"; -option java_generate_equals_and_hash = true; -option java_generic_services = true; -option optimize_for = SPEED; - -message MultiMutateRequest { - repeated Mutate mutationRequest = 1; -} - -message MultiMutateResponse { -} - -service MultiRowMutationService { - rpc mutateRows(MultiMutateRequest) - returns(MultiMutateResponse); -} diff --git hbase-server/src/main/protobuf/README.txt hbase-server/src/main/protobuf/README.txt deleted file mode 100644 index f979619..0000000 --- hbase-server/src/main/protobuf/README.txt +++ /dev/null @@ -1,27 +0,0 @@ -These are the protobuf definition files used by hbase. The produced java -classes are generated into src/main/java/org/apache/hadoop/hbase/protobuf/generated -and then checked in. The reasoning is that they change infrequently. - -To regnerate the classes after making definition file changes, ensure first that -the protobuf protoc tool is in your $PATH (You may need to download it and build -it first; its part of the protobuf package obtainable from here: -http://code.google.com/p/protobuf/downloads/list). Then run the following (You -should be able to just copy and paste the below into a terminal and hit return --- the protoc compiler runs fast): - - UNIX_PROTO_DIR=src/main/protobuf - JAVA_DIR=src/main/java/ - mkdir -p $JAVA_DIR 2> /dev/null - if which cygpath 2> /dev/null; then - PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR` - JAVA_DIR=`cygpath --windows $JAVA_DIR` - else - PROTO_DIR=$UNIX_PROTO_DIR - fi - for PROTO_FILE in $UNIX_PROTO_DIR/*.proto - do - protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE - done - -After you've done the above, check it in and then check it in (or post a patch -on a JIRA with your definition file changes and the generated files). diff --git hbase-server/src/main/protobuf/RPC.proto hbase-server/src/main/protobuf/RPC.proto deleted file mode 100644 index bdd31d2..0000000 --- hbase-server/src/main/protobuf/RPC.proto +++ /dev/null @@ -1,140 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Specification of (unsecure) HBase RPC: - * - * Client needs to set up a connection first to a server serving a certain - * HBase protocol (like ClientProtocol). Once the connection is set up, the - * client and server communicates on that channel for RPC requests/responses. - * The sections below describe the flow. - * - * As part of setting up a connection to a server, the client needs to send - * the ConnectionHeader header. At the data level, this looks like - * <"hrpc"-bytearray><'5'[byte]> - * - * For every RPC that the client makes it needs to send the following - * RpcRequestHeader and the RpcRequestBody. At the data level this looks like: - * - * - * - * - * On a success, the server's protobuf response looks like - * - * - * On a failure, the server's protobuf response looks like - * - * - * - * There is one special message that's sent from client to server - - * the Ping message. At the data level, this is just the bytes corresponding - * to integer -1. - */ - -import "Tracing.proto"; - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "RPCProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -message UserInformation { - required string effectiveUser = 1; - optional string realUser = 2; -} - -message ConnectionHeader { - /** User Info beyond what is established at connection establishment - * (applies to secure HBase setup) - */ - optional UserInformation userInfo = 1; - /** Protocol name for next rpc layer - * the client created a proxy with this protocol name - */ - optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; -} - - -/** - * The RPC request header - */ -message RpcRequestHeader { - /** Monotonically increasing callId, mostly to keep track of RPCs */ - required uint32 callId = 1; - optional RPCTInfo tinfo = 2; -} -/** - * The RPC request body - */ -message RpcRequestBody { - /** Name of the RPC method */ - required string methodName = 1; - - /** protocol version of class declaring the called method */ - optional uint64 clientProtocolVersion = 2; - - /** Bytes corresponding to the client protobuf request. This is the actual - * bytes corresponding to the RPC request argument. - */ - optional bytes request = 3; - - /** Some metainfo about the request. Helps us to treat RPCs with - * different priorities. For now this is just the classname of the request - * proto object. - */ - optional string requestClassName = 4; -} - -/** - * The RPC response header - */ -message RpcResponseHeader { - /** Echo back the callId the client sent */ - required uint32 callId = 1; - /** Did the RPC execution encounter an error at the server */ - enum Status { - SUCCESS = 0; - ERROR = 1; - FATAL = 2; - } - required Status status = 2; -} -/** - * The RPC response body - */ -message RpcResponseBody { - /** Optional response bytes. This is the actual bytes corresponding to the - * return value of the invoked RPC. - */ - optional bytes response = 1; -} -/** - * At the RPC layer, this message is used to indicate - * the server side exception to the RPC client. - * - * HBase RPC client throws an exception indicated - * by exceptionName with the stackTrace. 
- */ -message RpcException { - /** Class name of the exception thrown from the server */ - required string exceptionName = 1; - - /** Exception stack trace from the server side */ - optional string stackTrace = 2; -} diff --git hbase-server/src/main/protobuf/RegionServerStatus.proto hbase-server/src/main/protobuf/RegionServerStatus.proto deleted file mode 100644 index 37c2c63..0000000 --- hbase-server/src/main/protobuf/RegionServerStatus.proto +++ /dev/null @@ -1,101 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for RegionServerStatusProtocol. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "RegionServerStatusProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; - -message RegionServerStartupRequest { - /** Port number this regionserver is up on */ - required uint32 port = 1; - - /** This servers' startcode */ - required uint64 serverStartCode = 2; - - /** Current time of the region server in ms */ - required uint64 serverCurrentTime = 3; -} - -message RegionServerStartupResponse { - /** - * Configuration for the regionserver to use: e.g. filesystem, - * hbase rootdir, the hostname to use creating the RegionServer ServerName, - * etc - */ - repeated NameStringPair mapEntries = 1; -} - -message RegionServerReportRequest { - required ServerName server = 1; - - /** load the server is under */ - optional ServerLoad load = 2; -} - -message RegionServerReportResponse { -} - -message ReportRSFatalErrorRequest { - /** name of the server experiencing the error */ - required ServerName server = 1; - - /** informative text to expose in the master logs and UI */ - required string errorMessage = 2; -} - -message ReportRSFatalErrorResponse { -} - -message GetLastFlushedSequenceIdRequest { - /** region name */ - required bytes regionName = 1; -} - -message GetLastFlushedSequenceIdResponse { - /** the last HLog sequence id flushed from MemStore to HFile for the region */ - required uint64 lastFlushedSequenceId = 1; -} - -service RegionServerStatusService { - /** Called when a region server first starts. */ - rpc regionServerStartup(RegionServerStartupRequest) - returns(RegionServerStartupResponse); - - /** Called to report the load the RegionServer is under. */ - rpc regionServerReport(RegionServerReportRequest) - returns(RegionServerReportResponse); - - /** - * Called by a region server to report a fatal error that is causing it to - * abort. 
- */ - rpc reportRSFatalError(ReportRSFatalErrorRequest) - returns(ReportRSFatalErrorResponse); - - /** Called to get the sequence id of the last MemStore entry flushed to an - * HFile for a specified region. Used by the region server to speed up - * log splitting. */ - rpc getLastFlushedSequenceId(GetLastFlushedSequenceIdRequest) - returns(GetLastFlushedSequenceIdResponse); -} diff --git hbase-server/src/main/protobuf/Tracing.proto hbase-server/src/main/protobuf/Tracing.proto deleted file mode 100644 index 8e5babd..0000000 --- hbase-server/src/main/protobuf/Tracing.proto +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "Tracing"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -//Used to pass through the information necessary to continue -//a trace after an RPC is made. All we need is the traceid -//(so we know the overarching trace this message is a part of), and -//the id of the current span when this message was sent, so we know -//what span caused the new span we will create when this message is received. -message RPCTInfo { - optional int64 traceId = 1; - optional int64 parentId = 2; -} diff --git hbase-server/src/main/protobuf/ZooKeeper.proto hbase-server/src/main/protobuf/ZooKeeper.proto deleted file mode 100644 index 69a5c01..0000000 --- hbase-server/src/main/protobuf/ZooKeeper.proto +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// ZNode data in hbase are serialized protobufs with a four byte -// 'magic' 'PBUF' prefix. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "ZooKeeperProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; - -/** - * Content of the root-region-server znode. 
- */ -message RootRegionServer { - // The ServerName hosting the root region currently. - required ServerName server = 1; -} - -/** - * Content of the master znode. - */ -message Master { - // The ServerName of the current Master - required ServerName master = 1; -} - -/** - * Content of the '/hbase/shutdown', cluster state, znode. - */ -message ClusterUp { - // If this znode is present, cluster is up. Currently - // the data is cluster startDate. - required string startDate = 1; -} - -/** - * What we write under unassigned up in zookeeper as a region moves through - * open/close, etc., regions. Details a region in transition. - */ -message RegionTransition { - // Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode() - required uint32 eventTypeCode = 1; - // Full regionname in bytes - required bytes regionName = 2; - required uint64 createTime = 3; - // The region server where the transition will happen or is happening - required ServerName serverName = 4; - optional bytes payload = 5; -} - -/** - * WAL SplitLog directory znodes have this for content. Used doing distributed - * WAL splitting. Holds current state and name of server that originated split. - */ -message SplitLogTask { - enum State { - UNASSIGNED = 0; - OWNED = 1; - RESIGNED = 2; - DONE = 3; - ERR = 4; - } - required State state = 1; - required ServerName serverName = 2; -} - -/** - * The znode that holds state of table. - */ -message Table { - // Table's current state - enum State { - ENABLED = 0; - DISABLED = 1; - DISABLING = 2; - ENABLING = 3; - } - // This is the table's state. If no znode for a table, - // its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class - // for more. - required State state = 1 [default = ENABLED]; -} - -/** - * Used by replication. Holds a replication peer key. - */ -message ReplicationPeer { - // clusterKey is the concatenation of the slave cluster's - // hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent - required string clusterkey = 1; -} - -/** - * Used by replication. Holds whether enabled or disabled - */ -message ReplicationState { - enum State { - ENABLED = 0; - DISABLED = 1; - } - required State state = 1; -} - -/** - * Used by replication. Holds the current position in an HLog file. - */ -message ReplicationHLogPosition { - required int64 position = 1; -} - -/** - * Used by replication. Used to lock a region server during failover. - */ -message ReplicationLock { - required string lockOwner = 1; -} diff --git hbase-server/src/main/protobuf/hbase.proto hbase-server/src/main/protobuf/hbase.proto deleted file mode 100644 index f6bc51e..0000000 --- hbase-server/src/main/protobuf/hbase.proto +++ /dev/null @@ -1,268 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// This file contains protocol buffers that are shared throughout HBase - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "HBaseProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -/** - * Table Schema - * Inspired by the rest TableSchema - */ -message TableSchema { - optional bytes name = 1; - message Attribute { - required bytes name = 1; - required bytes value = 2; - } - repeated Attribute attributes = 2; - repeated ColumnFamilySchema columnFamilies = 3; -} - -/** - * Column Family Schema - * Inspired by the rest ColumSchemaMessage - */ -message ColumnFamilySchema { - required bytes name = 1; - message Attribute { - required bytes name = 1; - required bytes value = 2; - } - repeated Attribute attributes = 2; -} - -/** - * Protocol buffer version of HRegionInfo. - */ -message RegionInfo { - required uint64 regionId = 1; - required bytes tableName = 2; - optional bytes startKey = 3; - optional bytes endKey = 4; - optional bool offline = 5; - optional bool split = 6; -} - -/** - * Container protocol buffer to specify a region. - * You can specify region by region name, or the hash - * of the region name, which is known as encoded - * region name. - */ -message RegionSpecifier { - required RegionSpecifierType type = 1; - required bytes value = 2; - - enum RegionSpecifierType { - // ,,. - REGION_NAME = 1; - - // hash of ,, - ENCODED_REGION_NAME = 2; - } -} - -message RegionLoad { - /** the region specifier */ - required RegionSpecifier regionSpecifier = 1; - - /** the number of stores for the region */ - optional uint32 stores = 2; - - /** the number of storefiles for the region */ - optional uint32 storefiles = 3; - - /** the total size of the store files for the region, uncompressed, in MB */ - optional uint32 storeUncompressedSizeMB = 4; - - /** the current total size of the store files for the region, in MB */ - optional uint32 storefileSizeMB = 5; - - /** the current size of the memstore for the region, in MB */ - optional uint32 memstoreSizeMB = 6; - - /** - * The current total size of root-level store file indexes for the region, - * in MB. The same as {@link #rootIndexSizeKB} but in MB. - */ - optional uint32 storefileIndexSizeMB = 7; - - /** the current total read requests made to region */ - optional uint64 readRequestsCount = 8; - - /** the current total write requests made to region */ - optional uint64 writeRequestsCount = 9; - - /** the total compacting key values in currently running compaction */ - optional uint64 totalCompactingKVs = 10; - - /** the completed count of key values in currently running compaction */ - optional uint64 currentCompactedKVs = 11; - - /** The current total size of root-level indexes for the region, in KB. */ - optional uint32 rootIndexSizeKB = 12; - - /** The total size of all index blocks, not just the root level, in KB. */ - optional uint32 totalStaticIndexSizeKB = 13; - - /** - * The total size of all Bloom filter blocks, not just loaded into the - * block cache, in KB. - */ - optional uint32 totalStaticBloomSizeKB = 14; - - /** Region-level coprocessors. */ - repeated Coprocessor coprocessors = 15; - - /** the most recent sequence Id from cache flush */ - optional uint64 completeSequenceId = 16; -} - -/* Server-level protobufs */ - -message ServerLoad { - /** Number of requests since last report. */ - optional uint32 numberOfRequests = 1; - - /** Total Number of requests from the start of the region server. 
*/ - optional uint32 totalNumberOfRequests = 2; - - /** the amount of used heap, in MB. */ - optional uint32 usedHeapMB = 3; - - /** the maximum allowable size of the heap, in MB. */ - optional uint32 maxHeapMB = 4; - - /** Information on the load of individual regions. */ - repeated RegionLoad regionLoads = 5; - - /** - * Regionserver-level coprocessors, e.g., WALObserver implementations. - * Region-level coprocessors, on the other hand, are stored inside RegionLoad - * objects. - */ - repeated Coprocessor coprocessors = 6; - - /** - * Time when incremental (non-total) counts began being calculated (e.g. numberOfRequests) - * time is measured as the difference, measured in milliseconds, between the current time - * and midnight, January 1, 1970 UTC. - */ - optional uint64 reportStartTime = 7; - - /** - * Time when report was generated. - * time is measured as the difference, measured in milliseconds, between the current time - * and midnight, January 1, 1970 UTC. - */ - optional uint64 reportEndTime = 8; - - /** - * The port number that this region server is hosing an info server on. - */ - optional uint32 infoServerPort = 9; -} - -/** - * A range of time. Both from and to are Java time - * stamp in milliseconds. If you don't specify a time - * range, it means all time. By default, if not - * specified, from = 0, and to = Long.MAX_VALUE - */ -message TimeRange { - optional uint64 from = 1; - optional uint64 to = 2; -} - -message Filter { - required string name = 1; - optional bytes serializedFilter = 2; -} - -/* Comparison operators */ -enum CompareType { - LESS = 0; - LESS_OR_EQUAL = 1; - EQUAL = 2; - NOT_EQUAL = 3; - GREATER_OR_EQUAL = 4; - GREATER = 5; - NO_OP = 6; -} - -/** - * The type of the key in a KeyValue. - */ -enum KeyType { - MINIMUM = 0; - PUT = 4; - - DELETE = 8; - DELETE_COLUMN = 12; - DELETE_FAMILY = 14; - - // MAXIMUM is used when searching; you look from maximum on down. - MAXIMUM = 255; -} - -/** - * Protocol buffer version of KeyValue. - * It doesn't have those transient parameters - */ -message KeyValue { - required bytes row = 1; - required bytes family = 2; - required bytes qualifier = 3; - optional uint64 timestamp = 4; - optional KeyType keyType = 5; - optional bytes value = 6; -} - -/** - * Protocol buffer version of ServerName - */ -message ServerName { - required string hostName = 1; - optional uint32 port = 2; - optional uint64 startCode = 3; -} - -// Comment data structures - -message Coprocessor { - required string name = 1; -} - -message NameStringPair { - required string name = 1; - required string value = 2; -} - -message NameBytesPair { - required string name = 1; - optional bytes value = 2; -} - -message BytesBytesPair { - required bytes first = 1; - required bytes second = 2; -} diff --git pom.xml pom.xml index 39dafcf..86e60a0 100644 --- pom.xml +++ pom.xml @@ -50,6 +50,7 @@ http://hbase.apache.org hbase-server + hbase-rpc hbase-hadoop2-compat hbase-hadoop1-compat hbase-hadoop-compat @@ -917,6 +918,11 @@ org.apache.hbase + hbase-rpc + ${project.version} + + + org.apache.hbase hbase-hadoop-compat ${project.version}