diff --git hbase-rpc/pom.xml hbase-rpc/pom.xml
new file mode 100644
index 0000000..a699be4
--- /dev/null
+++ hbase-rpc/pom.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>0.95-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <artifactId>hbase-rpc</artifactId>
+  <name>HBase - RPC</name>
+  <description>RPC Classes of HBase</description>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <properties>
+            <property>
+              <name>listener</name>
+              <value>org.apache.hadoop.hbase.ResourceCheckerJUnitListener</value>
+            </property>
+          </properties>
+        </configuration>
+        <executions>
+          <execution>
+            <id>secondPartTestsExecution</id>
+            <phase>test</phase>
+            <goals>
+              <goal>test</goal>
+            </goals>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <!-- Skip the tests in this module -->
+    <profile>
+      <id>skip-rpc-tests</id>
+      <activation>
+        <property>
+          <name>skip-rpc-tests</name>
+        </property>
+      </activation>
+      <properties>
+        <surefire.skipFirstPart>true</surefire.skipFirstPart>
+      </properties>
+    </profile>
+  </profiles>
+</project>
\ No newline at end of file
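The surefire `listener` property in this pom registers a JUnit RunListener in every forked test JVM. The listener class itself is not part of this patch; below is a minimal, hypothetical sketch of what such a resource-checking listener can do, assuming plain JUnit 4 (the real org.apache.hadoop.hbase.ResourceCheckerJUnitListener tracks more than thread counts, and this class name is illustrative only):

```java
import org.junit.runner.Description;
import org.junit.runner.notification.RunListener;

// Illustrative stand-in for a resource-checking listener; the real
// ResourceCheckerJUnitListener lives elsewhere in HBase and checks more resources.
public class ThreadLeakListener extends RunListener {
  private int threadsBefore;

  @Override
  public void testStarted(Description description) {
    threadsBefore = Thread.activeCount();   // snapshot before the test body runs
  }

  @Override
  public void testFinished(Description description) {
    int leaked = Thread.activeCount() - threadsBefore;
    if (leaked > 0) {
      System.err.println(description.getDisplayName() + " leaked " + leaked + " thread(s)");
    }
  }
}
```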
diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
new file mode 100644
index 0000000..b246e53
--- /dev/null
+++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
@@ -0,0 +1,6553 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: AccessControl.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AccessControlProtos {
+ private AccessControlProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface PermissionOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .Permission.Action action = 1;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> getActionList();
+ int getActionCount();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index);
+
+ // optional bytes table = 2;
+ boolean hasTable();
+ com.google.protobuf.ByteString getTable();
+
+ // optional bytes family = 3;
+ boolean hasFamily();
+ com.google.protobuf.ByteString getFamily();
+
+ // optional bytes qualifier = 4;
+ boolean hasQualifier();
+ com.google.protobuf.ByteString getQualifier();
+ }
+ public static final class Permission extends
+ com.google.protobuf.GeneratedMessage
+ implements PermissionOrBuilder {
+ // Use Permission.newBuilder() to construct.
+ private Permission(Builder builder) {
+ super(builder);
+ }
+ private Permission(boolean noInit) {}
+
+ private static final Permission defaultInstance;
+ public static Permission getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Permission getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable;
+ }
+
+ public enum Action
+ implements com.google.protobuf.ProtocolMessageEnum {
+ READ(0, 0),
+ WRITE(1, 1),
+ EXEC(2, 2),
+ CREATE(3, 3),
+ ADMIN(4, 4),
+ ;
+
+ public static final int READ_VALUE = 0;
+ public static final int WRITE_VALUE = 1;
+ public static final int EXEC_VALUE = 2;
+ public static final int CREATE_VALUE = 3;
+ public static final int ADMIN_VALUE = 4;
+
+
+ public final int getNumber() { return value; }
+
+ public static Action valueOf(int value) {
+ switch (value) {
+ case 0: return READ;
+ case 1: return WRITE;
+ case 2: return EXEC;
+ case 3: return CREATE;
+ case 4: return ADMIN;
+ default: return null;
+ }
+ }
+
+      public static com.google.protobuf.Internal.EnumLiteMap<Action>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static com.google.protobuf.Internal.EnumLiteMap<Action>
+          internalValueMap =
+            new com.google.protobuf.Internal.EnumLiteMap<Action>() {
+ public Action findValueByNumber(int number) {
+ return Action.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final Action[] VALUES = {
+ READ, WRITE, EXEC, CREATE, ADMIN,
+ };
+
+ public static Action valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private Action(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:Permission.Action)
+ }
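The generated enum carries the .proto wire numbers alongside the Java constants. A quick round-trip sketch against the constants above (class names as generated in this file):

```java
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action;

public class ActionRoundTrip {
  public static void main(String[] args) {
    Action a = Action.valueOf(2);            // wire number 2 -> EXEC
    System.out.println(a + " = " + a.getNumber());
    System.out.println(Action.valueOf(99));  // unknown numbers yield null, not an exception
  }
}
```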
+
+ private int bitField0_;
+ // repeated .Permission.Action action = 1;
+ public static final int ACTION_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> action_;
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> getActionList() {
+ return action_;
+ }
+ public int getActionCount() {
+ return action_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) {
+ return action_.get(index);
+ }
+
+ // optional bytes table = 2;
+ public static final int TABLE_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString table_;
+ public boolean hasTable() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getTable() {
+ return table_;
+ }
+
+ // optional bytes family = 3;
+ public static final int FAMILY_FIELD_NUMBER = 3;
+ private com.google.protobuf.ByteString family_;
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+
+ // optional bytes qualifier = 4;
+ public static final int QUALIFIER_FIELD_NUMBER = 4;
+ private com.google.protobuf.ByteString qualifier_;
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+
+ private void initFields() {
+ action_ = java.util.Collections.emptyList();
+ table_ = com.google.protobuf.ByteString.EMPTY;
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < action_.size(); i++) {
+ output.writeEnum(1, action_.get(i).getNumber());
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(2, table_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(3, family_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(4, qualifier_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < action_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeEnumSizeNoTag(action_.get(i).getNumber());
+ }
+ size += dataSize;
+ size += 1 * action_.size();
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, table_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, family_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(4, qualifier_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) obj;
+
+ boolean result = true;
+ result = result && getActionList()
+ .equals(other.getActionList());
+ result = result && (hasTable() == other.hasTable());
+ if (hasTable()) {
+ result = result && getTable()
+ .equals(other.getTable());
+ }
+ result = result && (hasFamily() == other.hasFamily());
+ if (hasFamily()) {
+ result = result && getFamily()
+ .equals(other.getFamily());
+ }
+ result = result && (hasQualifier() == other.hasQualifier());
+ if (hasQualifier()) {
+ result = result && getQualifier()
+ .equals(other.getQualifier());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getActionCount() > 0) {
+ hash = (37 * hash) + ACTION_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnumList(getActionList());
+ }
+ if (hasTable()) {
+ hash = (37 * hash) + TABLE_FIELD_NUMBER;
+ hash = (53 * hash) + getTable().hashCode();
+ }
+ if (hasFamily()) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamily().hashCode();
+ }
+ if (hasQualifier()) {
+ hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getQualifier().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_Permission_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ action_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ table_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ action_ = java.util.Collections.unmodifiableList(action_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.action_ = action_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.table_ = table_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.family_ = family_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.qualifier_ = qualifier_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) return this;
+ if (!other.action_.isEmpty()) {
+ if (action_.isEmpty()) {
+ action_ = other.action_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureActionIsMutable();
+ action_.addAll(other.action_);
+ }
+ onChanged();
+ }
+ if (other.hasTable()) {
+ setTable(other.getTable());
+ }
+ if (other.hasFamily()) {
+ setFamily(other.getFamily());
+ }
+ if (other.hasQualifier()) {
+ setQualifier(other.getQualifier());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ addAction(value);
+ }
+ break;
+ }
+ case 10: {
+ int length = input.readRawVarint32();
+ int oldLimit = input.pushLimit(length);
+ while(input.getBytesUntilLimit() > 0) {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ addAction(value);
+ }
+ }
+ input.popLimit(oldLimit);
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ table_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ family_ = input.readBytes();
+ break;
+ }
+ case 34: {
+ bitField0_ |= 0x00000008;
+ qualifier_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .Permission.Action action = 1;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> action_ =
+ java.util.Collections.emptyList();
+ private void ensureActionIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action>(action_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> getActionList() {
+ return java.util.Collections.unmodifiableList(action_);
+ }
+ public int getActionCount() {
+ return action_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) {
+ return action_.get(index);
+ }
+ public Builder setAction(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureActionIsMutable();
+ action_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureActionIsMutable();
+ action_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllAction(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Action> values) {
+ ensureActionIsMutable();
+ super.addAll(values, action_);
+ onChanged();
+ return this;
+ }
+ public Builder clearAction() {
+ action_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
+ // optional bytes table = 2;
+ private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasTable() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getTable() {
+ return table_;
+ }
+ public Builder setTable(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ table_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearTable() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ table_ = getDefaultInstance().getTable();
+ onChanged();
+ return this;
+ }
+
+ // optional bytes family = 3;
+ private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+ public Builder setFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ family_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearFamily() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ family_ = getDefaultInstance().getFamily();
+ onChanged();
+ return this;
+ }
+
+ // optional bytes qualifier = 4;
+ private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+ public Builder setQualifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ qualifier_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearQualifier() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ qualifier_ = getDefaultInstance().getQualifier();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:Permission)
+ }
+
+ static {
+ defaultInstance = new Permission(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:Permission)
+ }
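A short usage sketch for the generated Permission message, using only the builder and parse methods shown above; the table and family names are made up for illustration:

```java
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission;

public class PermissionSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Build: every field on Permission is optional or repeated, so build() cannot fail.
    Permission perm = Permission.newBuilder()
        .addAction(Permission.Action.READ)
        .addAction(Permission.Action.WRITE)
        .setTable(ByteString.copyFromUtf8("t1"))   // illustrative table name
        .setFamily(ByteString.copyFromUtf8("f1"))  // illustrative family name
        .build();

    // Serialize and parse back through the generated static parseFrom().
    byte[] bytes = perm.toByteArray();             // inherited from AbstractMessageLite
    Permission parsed = Permission.parseFrom(bytes);
    System.out.println(parsed.getActionCount());   // 2
    System.out.println(parsed.hasQualifier());     // false: never set
  }
}
```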
+
+ public interface UserPermissionOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes user = 1;
+ boolean hasUser();
+ com.google.protobuf.ByteString getUser();
+
+ // required .Permission permission = 2;
+ boolean hasPermission();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder();
+ }
+ public static final class UserPermission extends
+ com.google.protobuf.GeneratedMessage
+ implements UserPermissionOrBuilder {
+ // Use UserPermission.newBuilder() to construct.
+ private UserPermission(Builder builder) {
+ super(builder);
+ }
+ private UserPermission(boolean noInit) {}
+
+ private static final UserPermission defaultInstance;
+ public static UserPermission getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public UserPermission getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bytes user = 1;
+ public static final int USER_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString user_;
+ public boolean hasUser() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getUser() {
+ return user_;
+ }
+
+ // required .Permission permission = 2;
+ public static final int PERMISSION_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_;
+ public boolean hasPermission() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() {
+ return permission_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() {
+ return permission_;
+ }
+
+ private void initFields() {
+ user_ = com.google.protobuf.ByteString.EMPTY;
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasUser()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasPermission()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, user_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, permission_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, user_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, permission_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) obj;
+
+ boolean result = true;
+ result = result && (hasUser() == other.hasUser());
+ if (hasUser()) {
+ result = result && getUser()
+ .equals(other.getUser());
+ }
+ result = result && (hasPermission() == other.hasPermission());
+ if (hasPermission()) {
+ result = result && getPermission()
+ .equals(other.getPermission());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasUser()) {
+ hash = (37 * hash) + USER_FIELD_NUMBER;
+ hash = (53 * hash) + getUser().hashCode();
+ }
+ if (hasPermission()) {
+ hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
+ hash = (53 * hash) + getPermission().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermission_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ user_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (permissionBuilder_ == null) {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance();
+ } else {
+ permissionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.user_ = user_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (permissionBuilder_ == null) {
+ result.permission_ = permission_;
+ } else {
+ result.permission_ = permissionBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) return this;
+ if (other.hasUser()) {
+ setUser(other.getUser());
+ }
+ if (other.hasPermission()) {
+ mergePermission(other.getPermission());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasUser()) {
+
+ return false;
+ }
+ if (!hasPermission()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ user_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder();
+ if (hasPermission()) {
+ subBuilder.mergeFrom(getPermission());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setPermission(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required bytes user = 1;
+ private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasUser() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getUser() {
+ return user_;
+ }
+ public Builder setUser(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ user_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearUser() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ user_ = getDefaultInstance().getUser();
+ onChanged();
+ return this;
+ }
+
+ // required .Permission permission = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_;
+ public boolean hasPermission() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission() {
+ if (permissionBuilder_ == null) {
+ return permission_;
+ } else {
+ return permissionBuilder_.getMessage();
+ }
+ }
+ public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ permission_ = value;
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder setPermission(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ permission_ = builderForValue.build();
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) {
+ permission_ =
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder(permission_).mergeFrom(value).buildPartial();
+ } else {
+ permission_ = value;
+ }
+ onChanged();
+ } else {
+ permissionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder clearPermission() {
+ if (permissionBuilder_ == null) {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance();
+ onChanged();
+ } else {
+ permissionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getPermissionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder() {
+ if (permissionBuilder_ != null) {
+ return permissionBuilder_.getMessageOrBuilder();
+ } else {
+ return permission_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionFieldBuilder() {
+ if (permissionBuilder_ == null) {
+ permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>(
+ permission_,
+ getParentForChildren(),
+ isClean());
+ permission_ = null;
+ }
+ return permissionBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:UserPermission)
+ }
+
+ static {
+ defaultInstance = new UserPermission(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:UserPermission)
+ }
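Unlike Permission, UserPermission declares both of its fields as required, so the generated builder refuses to build until both are set. A hedged sketch (the user name is illustrative):

```java
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission;

public class UserPermissionSketch {
  public static void main(String[] args) {
    UserPermission.Builder b = UserPermission.newBuilder()
        .setUser(ByteString.copyFromUtf8("alice"));      // illustrative user name
    System.out.println(b.isInitialized());               // false: 'permission' still unset

    b.setPermission(Permission.newBuilder()
        .addAction(Permission.Action.ADMIN)
        .build());
    UserPermission up = b.build();                       // safe now; would throw
                                                         // UninitializedMessageException before
    System.out.println(up.getPermission().getAction(0)); // ADMIN
  }
}
```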
+
+ public interface UserTablePermissionsOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .UserTablePermissions.UserPermissions permissions = 1;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions>
+        getPermissionsList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index);
+ int getPermissionsCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>
+ getPermissionsOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder(
+ int index);
+ }
+ public static final class UserTablePermissions extends
+ com.google.protobuf.GeneratedMessage
+ implements UserTablePermissionsOrBuilder {
+ // Use UserTablePermissions.newBuilder() to construct.
+ private UserTablePermissions(Builder builder) {
+ super(builder);
+ }
+ private UserTablePermissions(boolean noInit) {}
+
+ private static final UserTablePermissions defaultInstance;
+ public static UserTablePermissions getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public UserTablePermissions getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable;
+ }
+
+ public interface UserPermissionsOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes user = 1;
+ boolean hasUser();
+ com.google.protobuf.ByteString getUser();
+
+ // repeated .Permission permissions = 2;
+      java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission>
+          getPermissionsList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index);
+ int getPermissionsCount();
+      java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionsOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder(
+ int index);
+ }
+ public static final class UserPermissions extends
+ com.google.protobuf.GeneratedMessage
+ implements UserPermissionsOrBuilder {
+ // Use UserPermissions.newBuilder() to construct.
+ private UserPermissions(Builder builder) {
+ super(builder);
+ }
+ private UserPermissions(boolean noInit) {}
+
+ private static final UserPermissions defaultInstance;
+ public static UserPermissions getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public UserPermissions getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bytes user = 1;
+ public static final int USER_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString user_;
+ public boolean hasUser() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getUser() {
+ return user_;
+ }
+
+ // repeated .Permission permissions = 2;
+ public static final int PERMISSIONS_FIELD_NUMBER = 2;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> permissions_;
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> getPermissionsList() {
+ return permissions_;
+ }
+      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionsOrBuilderList() {
+ return permissions_;
+ }
+ public int getPermissionsCount() {
+ return permissions_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) {
+ return permissions_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder(
+ int index) {
+ return permissions_.get(index);
+ }
+
+ private void initFields() {
+ user_ = com.google.protobuf.ByteString.EMPTY;
+ permissions_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasUser()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, user_);
+ }
+ for (int i = 0; i < permissions_.size(); i++) {
+ output.writeMessage(2, permissions_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, user_);
+ }
+ for (int i = 0; i < permissions_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, permissions_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) obj;
+
+ boolean result = true;
+ result = result && (hasUser() == other.hasUser());
+ if (hasUser()) {
+ result = result && getUser()
+ .equals(other.getUser());
+ }
+ result = result && getPermissionsList()
+ .equals(other.getPermissionsList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasUser()) {
+ hash = (37 * hash) + USER_FIELD_NUMBER;
+ hash = (53 * hash) + getUser().hashCode();
+ }
+ if (getPermissionsCount() > 0) {
+ hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER;
+ hash = (53 * hash) + getPermissionsList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+          com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionsFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ user_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (permissionsBuilder_ == null) {
+ permissions_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ permissionsBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.user_ = user_;
+ if (permissionsBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ permissions_ = java.util.Collections.unmodifiableList(permissions_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.permissions_ = permissions_;
+ } else {
+ result.permissions_ = permissionsBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance()) return this;
+ if (other.hasUser()) {
+ setUser(other.getUser());
+ }
+ if (permissionsBuilder_ == null) {
+ if (!other.permissions_.isEmpty()) {
+ if (permissions_.isEmpty()) {
+ permissions_ = other.permissions_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensurePermissionsIsMutable();
+ permissions_.addAll(other.permissions_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.permissions_.isEmpty()) {
+ if (permissionsBuilder_.isEmpty()) {
+ permissionsBuilder_.dispose();
+ permissionsBuilder_ = null;
+ permissions_ = other.permissions_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ permissionsBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getPermissionsFieldBuilder() : null;
+ } else {
+ permissionsBuilder_.addAllMessages(other.permissions_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasUser()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
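+ // Wire-format note: a protobuf tag is (field_number << 3) | wire_type, so
+ // the length-delimited (wire type 2) fields "user = 1" and "permissions = 2"
+ // arrive below as tags 10 and 18 respectively.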
+ case 10: {
+ bitField0_ |= 0x00000001;
+ user_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addPermissions(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required bytes user = 1;
+ private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasUser() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getUser() {
+ return user_;
+ }
+ public Builder setUser(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ user_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearUser() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ user_ = getDefaultInstance().getUser();
+ onChanged();
+ return this;
+ }
+
+ // repeated .Permission permissions = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> permissions_ =
+ java.util.Collections.emptyList();
+ private void ensurePermissionsIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ permissions_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission>(permissions_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionsBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> getPermissionsList() {
+ if (permissionsBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(permissions_);
+ } else {
+ return permissionsBuilder_.getMessageList();
+ }
+ }
+ public int getPermissionsCount() {
+ if (permissionsBuilder_ == null) {
+ return permissions_.size();
+ } else {
+ return permissionsBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermissions(int index) {
+ if (permissionsBuilder_ == null) {
+ return permissions_.get(index);
+ } else {
+ return permissionsBuilder_.getMessage(index);
+ }
+ }
+ public Builder setPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionsIsMutable();
+ permissions_.set(index, value);
+ onChanged();
+ } else {
+ permissionsBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionsBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionsIsMutable();
+ permissions_.add(value);
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionsIsMutable();
+ permissions_.add(index, value);
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addPermissions(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.add(builderForValue.build());
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllPermissions(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> values) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ super.addAll(values, permissions_);
+ onChanged();
+ } else {
+ permissionsBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearPermissions() {
+ if (permissionsBuilder_ == null) {
+ permissions_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ } else {
+ permissionsBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removePermissions(int index) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.remove(index);
+ onChanged();
+ } else {
+ permissionsBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionsBuilder(
+ int index) {
+ return getPermissionsFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionsOrBuilder(
+ int index) {
+ if (permissionsBuilder_ == null) {
+ return permissions_.get(index); } else {
+ return permissionsBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionsOrBuilderList() {
+ if (permissionsBuilder_ != null) {
+ return permissionsBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(permissions_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder() {
+ return getPermissionsFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionsBuilder(
+ int index) {
+ return getPermissionsFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder>
+ getPermissionsBuilderList() {
+ return getPermissionsFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionsFieldBuilder() {
+ if (permissionsBuilder_ == null) {
+ permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>(
+ permissions_,
+ ((bitField0_ & 0x00000002) == 0x00000002),
+ getParentForChildren(),
+ isClean());
+ permissions_ = null;
+ }
+ return permissionsBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:UserTablePermissions.UserPermissions)
+ }
+
+ static {
+ defaultInstance = new UserPermissions(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:UserTablePermissions.UserPermissions)
+ }
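+
+ // Illustrative sketch (not generated output): assembling a UserPermissions
+ // entry with the builder above. Permission.Builder#addAction is assumed
+ // from the repeated .Permission.Action field declared in AccessControl.proto.
+ //
+ //   UserTablePermissions.UserPermissions perms =
+ //       UserTablePermissions.UserPermissions.newBuilder()
+ //           .setUser(com.google.protobuf.ByteString.copyFromUtf8("alice"))
+ //           .addPermissions(Permission.newBuilder()
+ //               .addAction(Permission.Action.READ)
+ //               .build())
+ //           .build();  // build() would throw if the required "user" were unset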
+
+ // repeated .UserTablePermissions.UserPermissions permissions = 1;
+ public static final int PERMISSIONS_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions> permissions_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions> getPermissionsList() {
+ return permissions_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>
+ getPermissionsOrBuilderList() {
+ return permissions_;
+ }
+ public int getPermissionsCount() {
+ return permissions_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) {
+ return permissions_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder(
+ int index) {
+ return permissions_.get(index);
+ }
+
+ private void initFields() {
+ permissions_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getPermissionsCount(); i++) {
+ if (!getPermissions(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < permissions_.size(); i++) {
+ output.writeMessage(1, permissions_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < permissions_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, permissions_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) obj;
+
+ boolean result = true;
+ result = result && getPermissionsList()
+ .equals(other.getPermissionsList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getPermissionsCount() > 0) {
+ hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER;
+ hash = (53 * hash) + getPermissionsList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissionsOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserTablePermissions_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionsFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (permissionsBuilder_ == null) {
+ permissions_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ permissionsBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions(this);
+ int from_bitField0_ = bitField0_;
+ if (permissionsBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ permissions_ = java.util.Collections.unmodifiableList(permissions_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.permissions_ = permissions_;
+ } else {
+ result.permissions_ = permissionsBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.getDefaultInstance()) return this;
+ if (permissionsBuilder_ == null) {
+ if (!other.permissions_.isEmpty()) {
+ if (permissions_.isEmpty()) {
+ permissions_ = other.permissions_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensurePermissionsIsMutable();
+ permissions_.addAll(other.permissions_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.permissions_.isEmpty()) {
+ if (permissionsBuilder_.isEmpty()) {
+ permissionsBuilder_.dispose();
+ permissionsBuilder_ = null;
+ permissions_ = other.permissions_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ permissionsBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getPermissionsFieldBuilder() : null;
+ } else {
+ permissionsBuilder_.addAllMessages(other.permissions_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getPermissionsCount(); i++) {
+ if (!getPermissions(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addPermissions(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .UserTablePermissions.UserPermissions permissions = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions> permissions_ =
+ java.util.Collections.emptyList();
+ private void ensurePermissionsIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ permissions_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions>(permissions_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder> permissionsBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions> getPermissionsList() {
+ if (permissionsBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(permissions_);
+ } else {
+ return permissionsBuilder_.getMessageList();
+ }
+ }
+ public int getPermissionsCount() {
+ if (permissionsBuilder_ == null) {
+ return permissions_.size();
+ } else {
+ return permissionsBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions getPermissions(int index) {
+ if (permissionsBuilder_ == null) {
+ return permissions_.get(index);
+ } else {
+ return permissionsBuilder_.getMessage(index);
+ }
+ }
+ public Builder setPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) {
+ if (permissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionsIsMutable();
+ permissions_.set(index, value);
+ onChanged();
+ } else {
+ permissionsBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionsBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) {
+ if (permissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionsIsMutable();
+ permissions_.add(value);
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions value) {
+ if (permissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionsIsMutable();
+ permissions_.add(index, value);
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addPermissions(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.add(builderForValue.build());
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermissions(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder builderForValue) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionsBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllPermissions(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions> values) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ super.addAll(values, permissions_);
+ onChanged();
+ } else {
+ permissionsBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearPermissions() {
+ if (permissionsBuilder_ == null) {
+ permissions_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ permissionsBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removePermissions(int index) {
+ if (permissionsBuilder_ == null) {
+ ensurePermissionsIsMutable();
+ permissions_.remove(index);
+ onChanged();
+ } else {
+ permissionsBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder getPermissionsBuilder(
+ int index) {
+ return getPermissionsFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder getPermissionsOrBuilder(
+ int index) {
+ if (permissionsBuilder_ == null) {
+ return permissions_.get(index); } else {
+ return permissionsBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>
+ getPermissionsOrBuilderList() {
+ if (permissionsBuilder_ != null) {
+ return permissionsBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(permissions_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder() {
+ return getPermissionsFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder addPermissionsBuilder(
+ int index) {
+ return getPermissionsFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder>
+ getPermissionsBuilderList() {
+ return getPermissionsFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>
+ getPermissionsFieldBuilder() {
+ if (permissionsBuilder_ == null) {
+ permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissionsOrBuilder>(
+ permissions_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ permissions_ = null;
+ }
+ return permissionsBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:UserTablePermissions)
+ }
+
+ static {
+ defaultInstance = new UserTablePermissions(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:UserTablePermissions)
+ }
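+
+ // Round-trip sketch (illustrative): the message serializes via writeTo() or
+ // toByteString() from the protobuf runtime and is restored through the
+ // parseFrom() overloads above; tablePerms stands in for a built instance.
+ //
+ //   com.google.protobuf.ByteString data = tablePerms.toByteString();
+ //   UserTablePermissions copy = UserTablePermissions.parseFrom(data);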
+
+ public interface GrantRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .UserPermission permission = 1;
+ boolean hasPermission();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder();
+ }
+ public static final class GrantRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GrantRequestOrBuilder {
+ // Use GrantRequest.newBuilder() to construct.
+ private GrantRequest(Builder builder) {
+ super(builder);
+ }
+ private GrantRequest(boolean noInit) {}
+
+ private static final GrantRequest defaultInstance;
+ public static GrantRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GrantRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .UserPermission permission = 1;
+ public static final int PERMISSION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_;
+ public boolean hasPermission() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() {
+ return permission_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() {
+ return permission_;
+ }
+
+ private void initFields() {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasPermission()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getPermission().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, permission_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, permission_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) obj;
+
+ boolean result = true;
+ result = result && (hasPermission() == other.hasPermission());
+ if (hasPermission()) {
+ result = result && getPermission()
+ .equals(other.getPermission());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasPermission()) {
+ hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
+ hash = (53 * hash) + getPermission().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (permissionBuilder_ == null) {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ } else {
+ permissionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (permissionBuilder_ == null) {
+ result.permission_ = permission_;
+ } else {
+ result.permission_ = permissionBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance()) return this;
+ if (other.hasPermission()) {
+ mergePermission(other.getPermission());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasPermission()) {
+
+ return false;
+ }
+ if (!getPermission().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder();
+ if (hasPermission()) {
+ subBuilder.mergeFrom(getPermission());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setPermission(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .UserPermission permission = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_;
+ public boolean hasPermission() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() {
+ if (permissionBuilder_ == null) {
+ return permission_;
+ } else {
+ return permissionBuilder_.getMessage();
+ }
+ }
+ public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ permission_ = value;
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setPermission(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ permission_ = builderForValue.build();
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) {
+ permission_ =
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(permission_).mergeFrom(value).buildPartial();
+ } else {
+ permission_ = value;
+ }
+ onChanged();
+ } else {
+ permissionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearPermission() {
+ if (permissionBuilder_ == null) {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ onChanged();
+ } else {
+ permissionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getPermissionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() {
+ if (permissionBuilder_ != null) {
+ return permissionBuilder_.getMessageOrBuilder();
+ } else {
+ return permission_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ getPermissionFieldBuilder() {
+ if (permissionBuilder_ == null) {
+ permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>(
+ permission_,
+ getParentForChildren(),
+ isClean());
+ permission_ = null;
+ }
+ return permissionBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GrantRequest)
+ }
+
+ static {
+ defaultInstance = new GrantRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GrantRequest)
+ }
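+
+ // Illustrative sketch: a grant call carries exactly one required
+ // UserPermission; userPerm below stands in for a previously built instance.
+ //
+ //   GrantRequest req = GrantRequest.newBuilder()
+ //       .setPermission(userPerm)
+ //       .build();  // build() fails fast while the required field is unset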
+
+ public interface GrantResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class GrantResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GrantResponseOrBuilder {
+ // Use GrantResponse.newBuilder() to construct.
+ private GrantResponse(Builder builder) {
+ super(builder);
+ }
+ private GrantResponse(boolean noInit) {}
+
+ private static final GrantResponse defaultInstance;
+ public static GrantResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GrantResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GrantResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
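+ // The read loop below dispatches on the wire-format tag: tag 0 marks
+ // end of input, and any unrecognized tag is preserved in the message's
+ // UnknownFieldSet instead of being dropped. GrantResponse declares no
+ // fields, so every non-zero tag is handled as an unknown field here.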
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:GrantResponse)
+ }
+
+ static {
+ defaultInstance = new GrantResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GrantResponse)
+ }
+
+ public interface RevokeRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .UserPermission permission = 1;
+ boolean hasPermission();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder();
+ }
+ public static final class RevokeRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements RevokeRequestOrBuilder {
+ // Use RevokeRequest.newBuilder() to construct.
+ private RevokeRequest(Builder builder) {
+ super(builder);
+ }
+ private RevokeRequest(boolean noInit) {}
+
+ private static final RevokeRequest defaultInstance;
+ public static RevokeRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RevokeRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable;
+ }
+
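+ // bitField0_ packs field-presence flags; bit 0 records whether the
+ // required permission field was explicitly set.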
+ private int bitField0_;
+ // required .UserPermission permission = 1;
+ public static final int PERMISSION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_;
+ public boolean hasPermission() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() {
+ return permission_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() {
+ return permission_;
+ }
+
+ private void initFields() {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ }
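+ // isInitialized() is memoized in a tri-state byte: -1 = not yet
+ // computed, 0 = a required field is missing, 1 = fully initialized.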
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasPermission()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getPermission().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, permission_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
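+ // getSerializedSize() is likewise computed once and cached; -1 marks
+ // "not yet computed".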
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, permission_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) obj;
+
+ boolean result = true;
+ result = result && (hasPermission() == other.hasPermission());
+ if (hasPermission()) {
+ result = result && getPermission()
+ .equals(other.getPermission());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasPermission()) {
+ hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
+ hash = (53 * hash) + getPermission().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (permissionBuilder_ == null) {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ } else {
+ permissionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (permissionBuilder_ == null) {
+ result.permission_ = permission_;
+ } else {
+ result.permission_ = permissionBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance()) return this;
+ if (other.hasPermission()) {
+ mergePermission(other.getPermission());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasPermission()) {
+
+ return false;
+ }
+ if (!getPermission().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
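+ // In the parse loop below, tag 10 == (field number 1 << 3) | wire type 2
+ // (length-delimited), i.e. the required UserPermission message. A value
+ // that was already set is merged with the incoming one via a sub-builder.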
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder();
+ if (hasPermission()) {
+ subBuilder.mergeFrom(getPermission());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setPermission(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .UserPermission permission = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_;
+ public boolean hasPermission() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission() {
+ if (permissionBuilder_ == null) {
+ return permission_;
+ } else {
+ return permissionBuilder_.getMessage();
+ }
+ }
+ public Builder setPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ permission_ = value;
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setPermission(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ permission_ = builderForValue.build();
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergePermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ permission_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance()) {
+ permission_ =
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder(permission_).mergeFrom(value).buildPartial();
+ } else {
+ permission_ = value;
+ }
+ onChanged();
+ } else {
+ permissionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearPermission() {
+ if (permissionBuilder_ == null) {
+ permission_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance();
+ onChanged();
+ } else {
+ permissionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getPermissionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder() {
+ if (permissionBuilder_ != null) {
+ return permissionBuilder_.getMessageOrBuilder();
+ } else {
+ return permission_;
+ }
+ }
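+ // The SingleFieldBuilder is created lazily; once it exists it owns the
+ // field state and the plain permission_ reference is nulled out.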
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ getPermissionFieldBuilder() {
+ if (permissionBuilder_ == null) {
+ permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>(
+ permission_,
+ getParentForChildren(),
+ isClean());
+ permission_ = null;
+ }
+ return permissionBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:RevokeRequest)
+ }
+
+ static {
+ defaultInstance = new RevokeRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:RevokeRequest)
+ }
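+ // Typical client-side usage (a sketch; `userPermission` stands in for an
+ // already-built UserPermission message):
+ //
+ //   RevokeRequest request = RevokeRequest.newBuilder()
+ //       .setPermission(userPermission)
+ //       .build();
+ //   byte[] wire = request.toByteArray();
+ //   RevokeRequest roundTripped = RevokeRequest.parseFrom(wire);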
+
+ public interface RevokeResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class RevokeResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements RevokeResponseOrBuilder {
+ // Use RevokeResponse.newBuilder() to construct.
+ private RevokeResponse(Builder builder) {
+ super(builder);
+ }
+ private RevokeResponse(boolean noInit) {}
+
+ private static final RevokeResponse defaultInstance;
+ public static RevokeResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RevokeResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_RevokeResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:RevokeResponse)
+ }
+
+ static {
+ defaultInstance = new RevokeResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:RevokeResponse)
+ }
+
+ public interface UserPermissionsRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes table = 1;
+ boolean hasTable();
+ com.google.protobuf.ByteString getTable();
+ }
+ public static final class UserPermissionsRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements UserPermissionsRequestOrBuilder {
+ // Use UserPermissionsRequest.newBuilder() to construct.
+ private UserPermissionsRequest(Builder builder) {
+ super(builder);
+ }
+ private UserPermissionsRequest(boolean noInit) {}
+
+ private static final UserPermissionsRequest defaultInstance;
+ public static UserPermissionsRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public UserPermissionsRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bytes table = 1;
+ public static final int TABLE_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString table_;
+ public boolean hasTable() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getTable() {
+ return table_;
+ }
+
+ private void initFields() {
+ table_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasTable()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, table_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, table_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) obj;
+
+ boolean result = true;
+ result = result && (hasTable() == other.hasTable());
+ if (hasTable()) {
+ result = result && getTable()
+ .equals(other.getTable());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasTable()) {
+ hash = (37 * hash) + TABLE_FIELD_NUMBER;
+ hash = (53 * hash) + getTable().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ table_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.table_ = table_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance()) return this;
+ if (other.hasTable()) {
+ setTable(other.getTable());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasTable()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ table_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required bytes table = 1;
+ private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasTable() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getTable() {
+ return table_;
+ }
+ public Builder setTable(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ table_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearTable() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ table_ = getDefaultInstance().getTable();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:UserPermissionsRequest)
+ }
+
+ static {
+ defaultInstance = new UserPermissionsRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:UserPermissionsRequest)
+ }
+
+ public interface UserPermissionsResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .UserPermission permission = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission>
+ getPermissionList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index);
+ int getPermissionCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ getPermissionOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(
+ int index);
+ }
+ public static final class UserPermissionsResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements UserPermissionsResponseOrBuilder {
+ // Use UserPermissionsResponse.newBuilder() to construct.
+ private UserPermissionsResponse(Builder builder) {
+ super(builder);
+ }
+ private UserPermissionsResponse(boolean noInit) {}
+
+ private static final UserPermissionsResponse defaultInstance;
+ public static UserPermissionsResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public UserPermissionsResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable;
+ }
+
+ // repeated .UserPermission permission = 1;
+ public static final int PERMISSION_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> permission_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> getPermissionList() {
+ return permission_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ getPermissionOrBuilderList() {
+ return permission_;
+ }
+ public int getPermissionCount() {
+ return permission_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) {
+ return permission_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(
+ int index) {
+ return permission_.get(index);
+ }
+
+ private void initFields() {
+ permission_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getPermissionCount(); i++) {
+ if (!getPermission(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < permission_.size(); i++) {
+ output.writeMessage(1, permission_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < permission_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, permission_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) obj;
+
+ boolean result = true;
+ result = result && getPermissionList()
+ .equals(other.getPermissionList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getPermissionCount() > 0) {
+ hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
+ hash = (53 * hash) + getPermissionList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (permissionBuilder_ == null) {
+ permission_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ permissionBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (permissionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ permission_ = java.util.Collections.unmodifiableList(permission_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.permission_ = permission_;
+ } else {
+ result.permission_ = permissionBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
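+ // Merging a repeated field: if this builder holds no elements yet, it
+ // adopts the other message's immutable list directly and defers the
+ // defensive copy until the first mutation; otherwise the incoming
+ // elements are appended.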
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()) return this;
+ if (permissionBuilder_ == null) {
+ if (!other.permission_.isEmpty()) {
+ if (permission_.isEmpty()) {
+ permission_ = other.permission_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensurePermissionIsMutable();
+ permission_.addAll(other.permission_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.permission_.isEmpty()) {
+ if (permissionBuilder_.isEmpty()) {
+ permissionBuilder_.dispose();
+ permissionBuilder_ = null;
+ permission_ = other.permission_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ permissionBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getPermissionFieldBuilder() : null;
+ } else {
+ permissionBuilder_.addAllMessages(other.permission_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getPermissionCount(); i++) {
+ if (!getPermission(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addPermission(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .UserPermission permission = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> permission_ =
+ java.util.Collections.emptyList();
+ private void ensurePermissionIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ permission_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission>(permission_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder> permissionBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> getPermissionList() {
+ if (permissionBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(permission_);
+ } else {
+ return permissionBuilder_.getMessageList();
+ }
+ }
+ public int getPermissionCount() {
+ if (permissionBuilder_ == null) {
+ return permission_.size();
+ } else {
+ return permissionBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission getPermission(int index) {
+ if (permissionBuilder_ == null) {
+ return permission_.get(index);
+ } else {
+ return permissionBuilder_.getMessage(index);
+ }
+ }
+ public Builder setPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionIsMutable();
+ permission_.set(index, value);
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionIsMutable();
+ permission_.add(value);
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionIsMutable();
+ permission_.add(index, value);
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addPermission(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.add(builderForValue.build());
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllPermission(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission> values) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ super.addAll(values, permission_);
+ onChanged();
+ } else {
+ permissionBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearPermission() {
+ if (permissionBuilder_ == null) {
+ permission_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ permissionBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removePermission(int index) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.remove(index);
+ onChanged();
+ } else {
+ permissionBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder getPermissionBuilder(
+ int index) {
+ return getPermissionFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder getPermissionOrBuilder(
+ int index) {
+ if (permissionBuilder_ == null) {
+ return permission_.get(index); } else {
+ return permissionBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ getPermissionOrBuilderList() {
+ if (permissionBuilder_ != null) {
+ return permissionBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(permission_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder() {
+ return getPermissionFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder addPermissionBuilder(
+ int index) {
+ return getPermissionFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder>
+ getPermissionBuilderList() {
+ return getPermissionFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>
+ getPermissionFieldBuilder() {
+ if (permissionBuilder_ == null) {
+ permissionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionOrBuilder>(
+ permission_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ permission_ = null;
+ }
+ return permissionBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:UserPermissionsResponse)
+ }
+
+ static {
+ defaultInstance = new UserPermissionsResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:UserPermissionsResponse)
+ }
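+
+ // Illustrative sketch, not part of the protoc output: a client that has
+ // received a UserPermissionsResponse can walk the repeated field through
+ // the generated list accessor. `response` stands for a message obtained
+ // from an AccessControlService.getUserPermissions call.
+ //
+ //   for (UserPermission perm : response.getPermissionList()) {
+ //     com.google.protobuf.ByteString user = perm.getUser();
+ //     Permission p = perm.getPermission();
+ //     // inspect p.getActionList(), p.getTable(), ...
+ //   }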
+
+ public interface CheckPermissionsRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .Permission permission = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission>
+ getPermissionList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index);
+ int getPermissionCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(
+ int index);
+ }
+ public static final class CheckPermissionsRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements CheckPermissionsRequestOrBuilder {
+ // Use CheckPermissionsRequest.newBuilder() to construct.
+ private CheckPermissionsRequest(Builder builder) {
+ super(builder);
+ }
+ private CheckPermissionsRequest(boolean noInit) {}
+
+ private static final CheckPermissionsRequest defaultInstance;
+ public static CheckPermissionsRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CheckPermissionsRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable;
+ }
+
+ // repeated .Permission permission = 1;
+ public static final int PERMISSION_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> permission_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> getPermissionList() {
+ return permission_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionOrBuilderList() {
+ return permission_;
+ }
+ public int getPermissionCount() {
+ return permission_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) {
+ return permission_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(
+ int index) {
+ return permission_.get(index);
+ }
+
+ private void initFields() {
+ permission_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < permission_.size(); i++) {
+ output.writeMessage(1, permission_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < permission_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, permission_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) obj;
+
+ boolean result = true;
+ result = result && getPermissionList()
+ .equals(other.getPermissionList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getPermissionCount() > 0) {
+ hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
+ hash = (53 * hash) + getPermissionList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getPermissionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (permissionBuilder_ == null) {
+ permission_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ permissionBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest(this);
+ int from_bitField0_ = bitField0_;
+ if (permissionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ permission_ = java.util.Collections.unmodifiableList(permission_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.permission_ = permission_;
+ } else {
+ result.permission_ = permissionBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance()) return this;
+ if (permissionBuilder_ == null) {
+ if (!other.permission_.isEmpty()) {
+ if (permission_.isEmpty()) {
+ permission_ = other.permission_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensurePermissionIsMutable();
+ permission_.addAll(other.permission_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.permission_.isEmpty()) {
+ if (permissionBuilder_.isEmpty()) {
+ permissionBuilder_.dispose();
+ permissionBuilder_ = null;
+ permission_ = other.permission_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ permissionBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getPermissionFieldBuilder() : null;
+ } else {
+ permissionBuilder_.addAllMessages(other.permission_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addPermission(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .Permission permission = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> permission_ =
+ java.util.Collections.emptyList();
+ private void ensurePermissionIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ permission_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission>(permission_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder> permissionBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> getPermissionList() {
+ if (permissionBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(permission_);
+ } else {
+ return permissionBuilder_.getMessageList();
+ }
+ }
+ public int getPermissionCount() {
+ if (permissionBuilder_ == null) {
+ return permission_.size();
+ } else {
+ return permissionBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission getPermission(int index) {
+ if (permissionBuilder_ == null) {
+ return permission_.get(index);
+ } else {
+ return permissionBuilder_.getMessage(index);
+ }
+ }
+ public Builder setPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionIsMutable();
+ permission_.set(index, value);
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermission(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionIsMutable();
+ permission_.add(value);
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission value) {
+ if (permissionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePermissionIsMutable();
+ permission_.add(index, value);
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addPermission(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.add(builderForValue.build());
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addPermission(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder builderForValue) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ permissionBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllPermission(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission> values) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ super.addAll(values, permission_);
+ onChanged();
+ } else {
+ permissionBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearPermission() {
+ if (permissionBuilder_ == null) {
+ permission_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ permissionBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removePermission(int index) {
+ if (permissionBuilder_ == null) {
+ ensurePermissionIsMutable();
+ permission_.remove(index);
+ onChanged();
+ } else {
+ permissionBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder getPermissionBuilder(
+ int index) {
+ return getPermissionFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder getPermissionOrBuilder(
+ int index) {
+ if (permissionBuilder_ == null) {
+ return permission_.get(index); } else {
+ return permissionBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionOrBuilderList() {
+ if (permissionBuilder_ != null) {
+ return permissionBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(permission_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder() {
+ return getPermissionFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder addPermissionBuilder(
+ int index) {
+ return getPermissionFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder>
+ getPermissionBuilderList() {
+ return getPermissionFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>
+ getPermissionFieldBuilder() {
+ if (permissionBuilder_ == null) {
+ permissionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.PermissionOrBuilder>(
+ permission_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ permission_ = null;
+ }
+ return permissionBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:CheckPermissionsRequest)
+ }
+
+ static {
+ defaultInstance = new CheckPermissionsRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:CheckPermissionsRequest)
+ }
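+
+ // Illustrative sketch, not part of the protoc output: assembling a
+ // CheckPermissionsRequest with the builders above. The table name is a
+ // placeholder value.
+ //
+ //   Permission perm = Permission.newBuilder()
+ //       .addAction(Permission.Action.READ)
+ //       .setTable(com.google.protobuf.ByteString.copyFromUtf8("myTable"))
+ //       .build();
+ //   CheckPermissionsRequest request = CheckPermissionsRequest.newBuilder()
+ //       .addPermission(perm)
+ //       .build();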
+
+ public interface CheckPermissionsResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class CheckPermissionsResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements CheckPermissionsResponseOrBuilder {
+ // Use CheckPermissionsResponse.newBuilder() to construct.
+ private CheckPermissionsResponse(Builder builder) {
+ super(builder);
+ }
+ private CheckPermissionsResponse(boolean noInit) {}
+
+ private static final CheckPermissionsResponse defaultInstance;
+ public static CheckPermissionsResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CheckPermissionsResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_CheckPermissionsResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:CheckPermissionsResponse)
+ }
+
+ static {
+ defaultInstance = new CheckPermissionsResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:CheckPermissionsResponse)
+ }
+
+ public static abstract class AccessControlService
+ implements com.google.protobuf.Service {
+ protected AccessControlService() {}
+
+ public interface Interface {
+ public abstract void grant(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse> done);
+
+ public abstract void revoke(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse> done);
+
+ public abstract void getUserPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse> done);
+
+ public abstract void checkPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse> done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new AccessControlService() {
+ @java.lang.Override
+ public void grant(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse> done) {
+ impl.grant(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void revoke(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse> done) {
+ impl.revoke(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getUserPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse> done) {
+ impl.getUserPermissions(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void checkPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse> done) {
+ impl.checkPermissions(controller, request, done);
+ }
+
+ };
+ }
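+
+ // Illustrative sketch, not part of the protoc output: a server-side
+ // implementation of Interface can be exposed as a generic protobuf
+ // Service via newReflectiveService above. `MyAccessController` is a
+ // hypothetical class implementing Interface.
+ //
+ //   com.google.protobuf.Service service =
+ //       AccessControlService.newReflectiveService(new MyAccessController());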
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.grant(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)request);
+ case 1:
+ return impl.revoke(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request);
+ case 2:
+ return impl.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request);
+ case 3:
+ return impl.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ public abstract void grant(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse> done);
+
+ public abstract void revoke(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse> done);
+
+ public abstract void getUserPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse> done);
+
+ public abstract void checkPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.grant(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.revoke(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 2:
+ this.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 3:
+ this.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void grant(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance()));
+ }
+
+ public void revoke(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance()));
+ }
+
+ public void getUserPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()));
+ }
+
+ public void checkPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()));
+ }
+ }
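+
+ // Illustrative sketch, not part of the protoc output: invoking the
+ // non-blocking stub. `channel`, `controller` and `request` are supplied
+ // by the caller; the callback receives the eventual response (check
+ // controller.failed() to detect RPC errors).
+ //
+ //   AccessControlService.Stub stub = AccessControlService.newStub(channel);
+ //   stub.checkPermissions(controller, request,
+ //       new com.google.protobuf.RpcCallback<CheckPermissionsResponse>() {
+ //         public void run(CheckPermissionsResponse response) {
+ //           // handle response here
+ //         }
+ //       });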
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse grant(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse revoke(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance());
+ }
+
+ }
+ }
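+
+ // Illustrative sketch, not part of the protoc output: the blocking stub
+ // mirrors BlockingInterface and surfaces RPC failures as ServiceException.
+ // `blockingChannel`, `controller` and `request` are supplied by the caller.
+ //
+ //   AccessControlService.BlockingInterface stub =
+ //       AccessControlService.newBlockingStub(blockingChannel);
+ //   CheckPermissionsResponse response =
+ //       stub.checkPermissions(controller, request);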
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_Permission_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_Permission_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_UserPermission_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_UserPermission_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_UserTablePermissions_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_UserTablePermissions_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_UserTablePermissions_UserPermissions_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GrantRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GrantRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GrantResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GrantResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_RevokeRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_RevokeRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_RevokeResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_RevokeResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_UserPermissionsRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_UserPermissionsRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_UserPermissionsResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_UserPermissionsResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_CheckPermissionsRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_CheckPermissionsRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_CheckPermissionsResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_CheckPermissionsResponse_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\023AccessControl.proto\"\242\001\n\nPermission\022\"\n\006" +
+ "action\030\001 \003(\0162\022.Permission.Action\022\r\n\005tabl" +
+ "e\030\002 \001(\014\022\016\n\006family\030\003 \001(\014\022\021\n\tqualifier\030\004 \001" +
+ "(\014\">\n\006Action\022\010\n\004READ\020\000\022\t\n\005WRITE\020\001\022\010\n\004EXE" +
+ "C\020\002\022\n\n\006CREATE\020\003\022\t\n\005ADMIN\020\004\"?\n\016UserPermis" +
+ "sion\022\014\n\004user\030\001 \002(\014\022\037\n\npermission\030\002 \002(\0132\013" +
+ ".Permission\"\225\001\n\024UserTablePermissions\022:\n\013" +
+ "permissions\030\001 \003(\0132%.UserTablePermissions" +
+ ".UserPermissions\032A\n\017UserPermissions\022\014\n\004u" +
+ "ser\030\001 \002(\014\022 \n\013permissions\030\002 \003(\0132\013.Permiss",
+ "ion\"3\n\014GrantRequest\022#\n\npermission\030\001 \002(\0132" +
+ "\017.UserPermission\"\017\n\rGrantResponse\"4\n\rRev" +
+ "okeRequest\022#\n\npermission\030\001 \002(\0132\017.UserPer" +
+ "mission\"\020\n\016RevokeResponse\"\'\n\026UserPermiss" +
+ "ionsRequest\022\r\n\005table\030\001 \002(\014\">\n\027UserPermis" +
+ "sionsResponse\022#\n\npermission\030\001 \003(\0132\017.User" +
+ "Permission\":\n\027CheckPermissionsRequest\022\037\n" +
+ "\npermission\030\001 \003(\0132\013.Permission\"\032\n\030CheckP" +
+ "ermissionsResponse2\373\001\n\024AccessControlServ" +
+ "ice\022&\n\005grant\022\r.GrantRequest\032\016.GrantRespo",
+ "nse\022)\n\006revoke\022\016.RevokeRequest\032\017.RevokeRe" +
+ "sponse\022G\n\022getUserPermissions\022\027.UserPermi" +
+ "ssionsRequest\032\030.UserPermissionsResponse\022" +
+ "G\n\020checkPermissions\022\030.CheckPermissionsRe" +
+ "quest\032\031.CheckPermissionsResponseBI\n*org." +
+ "apache.hadoop.hbase.protobuf.generatedB\023" +
+ "AccessControlProtosH\001\210\001\001\240\001\001"
+ };
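+ // Editor's note: the escaped string above is the serialized FileDescriptorProto
+ // for AccessControl.proto. internalBuildGeneratedFileFrom() parses it once at
+ // class-load time, and the assigner below caches each message's Descriptor and
+ // FieldAccessorTable into the internal_static_* fields for reflection support.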
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_Permission_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_Permission_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_Permission_descriptor,
+ new java.lang.String[] { "Action", "Table", "Family", "Qualifier", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Builder.class);
+ internal_static_UserPermission_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_UserPermission_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_UserPermission_descriptor,
+ new java.lang.String[] { "User", "Permission", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermission.Builder.class);
+ internal_static_UserTablePermissions_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_UserTablePermissions_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_UserTablePermissions_descriptor,
+ new java.lang.String[] { "Permissions", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.Builder.class);
+ internal_static_UserTablePermissions_UserPermissions_descriptor =
+ internal_static_UserTablePermissions_descriptor.getNestedTypes().get(0);
+ internal_static_UserTablePermissions_UserPermissions_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_UserTablePermissions_UserPermissions_descriptor,
+ new java.lang.String[] { "User", "Permissions", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserTablePermissions.UserPermissions.Builder.class);
+ internal_static_GrantRequest_descriptor =
+ getDescriptor().getMessageTypes().get(3);
+ internal_static_GrantRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GrantRequest_descriptor,
+ new java.lang.String[] { "Permission", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantRequest.Builder.class);
+ internal_static_GrantResponse_descriptor =
+ getDescriptor().getMessageTypes().get(4);
+ internal_static_GrantResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GrantResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GrantResponse.Builder.class);
+ internal_static_RevokeRequest_descriptor =
+ getDescriptor().getMessageTypes().get(5);
+ internal_static_RevokeRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_RevokeRequest_descriptor,
+ new java.lang.String[] { "Permission", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.Builder.class);
+ internal_static_RevokeResponse_descriptor =
+ getDescriptor().getMessageTypes().get(6);
+ internal_static_RevokeResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_RevokeResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.Builder.class);
+ internal_static_UserPermissionsRequest_descriptor =
+ getDescriptor().getMessageTypes().get(7);
+ internal_static_UserPermissionsRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_UserPermissionsRequest_descriptor,
+ new java.lang.String[] { "Table", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class);
+ internal_static_UserPermissionsResponse_descriptor =
+ getDescriptor().getMessageTypes().get(8);
+ internal_static_UserPermissionsResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_UserPermissionsResponse_descriptor,
+ new java.lang.String[] { "Permission", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class);
+ internal_static_CheckPermissionsRequest_descriptor =
+ getDescriptor().getMessageTypes().get(9);
+ internal_static_CheckPermissionsRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_CheckPermissionsRequest_descriptor,
+ new java.lang.String[] { "Permission", },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.Builder.class);
+ internal_static_CheckPermissionsResponse_descriptor =
+ getDescriptor().getMessageTypes().get(10);
+ internal_static_CheckPermissionsResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_CheckPermissionsResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
new file mode 100644
index 0000000..73434c1
--- /dev/null
+++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
@@ -0,0 +1,16766 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Admin.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AdminProtos {
+ private AdminProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface GetRegionInfoRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionSpecifier region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+ // optional bool compactionState = 2;
+ boolean hasCompactionState();
+ boolean getCompactionState();
+ }
+ public static final class GetRegionInfoRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetRegionInfoRequestOrBuilder {
+ // Use GetRegionInfoRequest.newBuilder() to construct.
+ private GetRegionInfoRequest(Builder builder) {
+ super(builder);
+ }
+ private GetRegionInfoRequest(boolean noInit) {}
+
+ private static final GetRegionInfoRequest defaultInstance;
+ public static GetRegionInfoRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetRegionInfoRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionSpecifier region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // optional bool compactionState = 2;
+ public static final int COMPACTIONSTATE_FIELD_NUMBER = 2;
+ private boolean compactionState_;
+ public boolean hasCompactionState() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public boolean getCompactionState() {
+ return compactionState_;
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ compactionState_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBool(2, compactionState_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(2, compactionState_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && (hasCompactionState() == other.hasCompactionState());
+ if (hasCompactionState()) {
+ result = result && (getCompactionState()
+ == other.getCompactionState());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (hasCompactionState()) {
+ hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getCompactionState());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ compactionState_ = false;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.compactionState_ = compactionState_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (other.hasCompactionState()) {
+ setCompactionState(other.getCompactionState());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ compactionState_ = input.readBool();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionSpecifier region = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // optional bool compactionState = 2;
+ private boolean compactionState_ ;
+ public boolean hasCompactionState() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public boolean getCompactionState() {
+ return compactionState_;
+ }
+ public Builder setCompactionState(boolean value) {
+ bitField0_ |= 0x00000002;
+ compactionState_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearCompactionState() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ compactionState_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest)
+ }
+
+ static {
+ defaultInstance = new GetRegionInfoRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetRegionInfoRequest)
+ }
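+
+ // Editor's note (illustrative sketch, not protoc output): requests are assembled
+ // through the generated Builder; region is a required field, so build() throws an
+ // UninitializedMessageException if it is never set. The RegionSpecifier value
+ // below is an assumed, previously constructed instance.
+ //
+ //   AdminProtos.GetRegionInfoRequest req =
+ //       AdminProtos.GetRegionInfoRequest.newBuilder()
+ //           .setRegion(regionSpecifier)  // required
+ //           .setCompactionState(true)    // optional: also report compaction state
+ //           .build();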
+
+ public interface GetRegionInfoResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionInfo regionInfo = 1;
+ boolean hasRegionInfo();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
+
+ // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
+ boolean hasCompactionState();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState();
+ }
+ public static final class GetRegionInfoResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetRegionInfoResponseOrBuilder {
+ // Use GetRegionInfoResponse.newBuilder() to construct.
+ private GetRegionInfoResponse(Builder builder) {
+ super(builder);
+ }
+ private GetRegionInfoResponse(boolean noInit) {}
+
+ private static final GetRegionInfoResponse defaultInstance;
+ public static GetRegionInfoResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetRegionInfoResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable;
+ }
+
+ public enum CompactionState
+ implements com.google.protobuf.ProtocolMessageEnum {
+ NONE(0, 0),
+ MINOR(1, 1),
+ MAJOR(2, 2),
+ MAJOR_AND_MINOR(3, 3),
+ ;
+
+ public static final int NONE_VALUE = 0;
+ public static final int MINOR_VALUE = 1;
+ public static final int MAJOR_VALUE = 2;
+ public static final int MAJOR_AND_MINOR_VALUE = 3;
+
+
+ public final int getNumber() { return value; }
+
+ public static CompactionState valueOf(int value) {
+ switch (value) {
+ case 0: return NONE;
+ case 1: return MINOR;
+ case 2: return MAJOR;
+ case 3: return MAJOR_AND_MINOR;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<CompactionState>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<CompactionState>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<CompactionState>() {
+ public CompactionState findValueByNumber(int number) {
+ return CompactionState.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final CompactionState[] VALUES = {
+ NONE, MINOR, MAJOR, MAJOR_AND_MINOR,
+ };
+
+ public static CompactionState valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private CompactionState(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:GetRegionInfoResponse.CompactionState)
+ }
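+
+ // Editor's note: valueOf(int) returns null for an unrecognized wire value instead
+ // of throwing, e.g. CompactionState.valueOf(2) == MAJOR but CompactionState.valueOf(9)
+ // == null; the Builder.mergeFrom() parse loop below relies on this to shunt unknown
+ // enum numbers into the message's unknown fields.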
+
+ private int bitField0_;
+ // required .RegionInfo regionInfo = 1;
+ public static final int REGIONINFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
+ public boolean hasRegionInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
+ return regionInfo_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
+ return regionInfo_;
+ }
+
+ // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
+ public static final int COMPACTIONSTATE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_;
+ public boolean hasCompactionState() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() {
+ return compactionState_;
+ }
+
+ private void initFields() {
+ regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegionInfo()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegionInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, regionInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeEnum(2, compactionState_.getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, regionInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(2, compactionState_.getNumber());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj;
+
+ boolean result = true;
+ result = result && (hasRegionInfo() == other.hasRegionInfo());
+ if (hasRegionInfo()) {
+ result = result && getRegionInfo()
+ .equals(other.getRegionInfo());
+ }
+ result = result && (hasCompactionState() == other.hasCompactionState());
+ if (hasCompactionState()) {
+ result = result &&
+ (getCompactionState() == other.getCompactionState());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegionInfo()) {
+ hash = (37 * hash) + REGIONINFO_FIELD_NUMBER;
+ hash = (53 * hash) + getRegionInfo().hashCode();
+ }
+ if (hasCompactionState()) {
+ hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getCompactionState());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ } else {
+ regionInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionInfoBuilder_ == null) {
+ result.regionInfo_ = regionInfo_;
+ } else {
+ result.regionInfo_ = regionInfoBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.compactionState_ = compactionState_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this;
+ if (other.hasRegionInfo()) {
+ mergeRegionInfo(other.getRegionInfo());
+ }
+ if (other.hasCompactionState()) {
+ setCompactionState(other.getCompactionState());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegionInfo()) {
+
+ return false;
+ }
+ if (!getRegionInfo().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
+ if (hasRegionInfo()) {
+ subBuilder.mergeFrom(getRegionInfo());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegionInfo(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ bitField0_ |= 0x00000002;
+ compactionState_ = value;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionInfo regionInfo = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
+ public boolean hasRegionInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_;
+ } else {
+ return regionInfoBuilder_.getMessage();
+ }
+ }
+ public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ regionInfo_ = value;
+ onChanged();
+ } else {
+ regionInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegionInfo(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
+ regionInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial();
+ } else {
+ regionInfo_ = value;
+ }
+ onChanged();
+ } else {
+ regionInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegionInfo() {
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ onChanged();
+ } else {
+ regionInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionInfoFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
+ if (regionInfoBuilder_ != null) {
+ return regionInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return regionInfo_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoFieldBuilder() {
+ if (regionInfoBuilder_ == null) {
+ regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
+ regionInfo_,
+ getParentForChildren(),
+ isClean());
+ regionInfo_ = null;
+ }
+ return regionInfoBuilder_;
+ }
+
+ // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
+ public boolean hasCompactionState() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() {
+ return compactionState_;
+ }
+ public Builder setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ compactionState_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearCompactionState() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse)
+ }
+
+ static {
+ defaultInstance = new GetRegionInfoResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetRegionInfoResponse)
+ }
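+
+ // Editor's note (illustrative sketch, not protoc output): on the read side, guard
+ // optional fields with their has*() accessors, since the getter silently returns
+ // the default (CompactionState.NONE here) when the field was never set. The rsp
+ // value is assumed to come from a prior getRegionInfo() RPC.
+ //
+ //   HBaseProtos.RegionInfo info = rsp.getRegionInfo(); // required, always safe
+ //   AdminProtos.GetRegionInfoResponse.CompactionState cs =
+ //       rsp.hasCompactionState() ? rsp.getCompactionState()
+ //                                : AdminProtos.GetRegionInfoResponse.CompactionState.NONE;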
+
+ public interface GetStoreFileRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionSpecifier region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+ // repeated bytes family = 2;
+ java.util.List<com.google.protobuf.ByteString> getFamilyList();
+ int getFamilyCount();
+ com.google.protobuf.ByteString getFamily(int index);
+ }
+ public static final class GetStoreFileRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetStoreFileRequestOrBuilder {
+ // Use GetStoreFileRequest.newBuilder() to construct.
+ private GetStoreFileRequest(Builder builder) {
+ super(builder);
+ }
+ private GetStoreFileRequest(boolean noInit) {}
+
+ private static final GetStoreFileRequest defaultInstance;
+ public static GetStoreFileRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetStoreFileRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionSpecifier region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // repeated bytes family = 2;
+ public static final int FAMILY_FIELD_NUMBER = 2;
+ private java.util.List<com.google.protobuf.ByteString> family_;
+ public java.util.List<com.google.protobuf.ByteString>
+ getFamilyList() {
+ return family_;
+ }
+ public int getFamilyCount() {
+ return family_.size();
+ }
+ public com.google.protobuf.ByteString getFamily(int index) {
+ return family_.get(index);
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ family_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ for (int i = 0; i < family_.size(); i++) {
+ output.writeBytes(2, family_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ {
+ int dataSize = 0;
+ for (int i = 0; i < family_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(family_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getFamilyList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && getFamilyList()
+ .equals(other.getFamilyList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (getFamilyCount() > 0) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamilyList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ family_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ family_ = java.util.Collections.unmodifiableList(family_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.family_ = family_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (!other.family_.isEmpty()) {
+ if (family_.isEmpty()) {
+ family_ = other.family_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureFamilyIsMutable();
+ family_.addAll(other.family_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 18: {
+ ensureFamilyIsMutable();
+ family_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionSpecifier region = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // repeated bytes family = 2;
+ private java.util.List<com.google.protobuf.ByteString> family_ = java.util.Collections.emptyList();
+ private void ensureFamilyIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ family_ = new java.util.ArrayList<com.google.protobuf.ByteString>(family_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+ public java.util.List<com.google.protobuf.ByteString>
+ getFamilyList() {
+ return java.util.Collections.unmodifiableList(family_);
+ }
+ public int getFamilyCount() {
+ return family_.size();
+ }
+ public com.google.protobuf.ByteString getFamily(int index) {
+ return family_.get(index);
+ }
+ public Builder setFamily(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyIsMutable();
+ family_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyIsMutable();
+ family_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllFamily(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureFamilyIsMutable();
+ super.addAll(values, family_);
+ onChanged();
+ return this;
+ }
+ public Builder clearFamily() {
+ family_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetStoreFileRequest)
+ }
+
+ static {
+ defaultInstance = new GetStoreFileRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetStoreFileRequest)
+ }
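+
+ // Illustrative sketch (editorial note, not emitted by protoc): one way a
+ // caller might build, serialize, and re-parse a GetStoreFileRequest. The
+ // 'regionSpecifier' value is a hypothetical, fully initialized
+ // HBaseProtos.RegionSpecifier; build() throws if the required 'region'
+ // field is unset or itself uninitialized.
+ //
+ //   GetStoreFileRequest request = GetStoreFileRequest.newBuilder()
+ //       .setRegion(regionSpecifier)
+ //       .addFamily(com.google.protobuf.ByteString.copyFromUtf8("info"))
+ //       .build();
+ //   GetStoreFileRequest copy = GetStoreFileRequest.parseFrom(request.toByteArray());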
+
+ public interface GetStoreFileResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated string storeFile = 1;
+ java.util.List<String> getStoreFileList();
+ int getStoreFileCount();
+ String getStoreFile(int index);
+ }
+ public static final class GetStoreFileResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetStoreFileResponseOrBuilder {
+ // Use GetStoreFileResponse.newBuilder() to construct.
+ private GetStoreFileResponse(Builder builder) {
+ super(builder);
+ }
+ private GetStoreFileResponse(boolean noInit) {}
+
+ private static final GetStoreFileResponse defaultInstance;
+ public static GetStoreFileResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetStoreFileResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable;
+ }
+
+ // repeated string storeFile = 1;
+ public static final int STOREFILE_FIELD_NUMBER = 1;
+ private com.google.protobuf.LazyStringList storeFile_;
+ public java.util.List<String>
+ getStoreFileList() {
+ return storeFile_;
+ }
+ public int getStoreFileCount() {
+ return storeFile_.size();
+ }
+ public String getStoreFile(int index) {
+ return storeFile_.get(index);
+ }
+
+ private void initFields() {
+ storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < storeFile_.size(); i++) {
+ output.writeBytes(1, storeFile_.getByteString(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < storeFile_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(storeFile_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getStoreFileList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) obj;
+
+ boolean result = true;
+ result = result && getStoreFileList()
+ .equals(other.getStoreFileList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getStoreFileCount() > 0) {
+ hash = (37 * hash) + STOREFILE_FIELD_NUMBER;
+ hash = (53 * hash) + getStoreFileList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(
+ storeFile_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.storeFile_ = storeFile_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()) return this;
+ if (!other.storeFile_.isEmpty()) {
+ if (storeFile_.isEmpty()) {
+ storeFile_ = other.storeFile_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureStoreFileIsMutable();
+ storeFile_.addAll(other.storeFile_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ ensureStoreFileIsMutable();
+ storeFile_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated string storeFile = 1;
+ private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureStoreFileIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<String>
+ getStoreFileList() {
+ return java.util.Collections.unmodifiableList(storeFile_);
+ }
+ public int getStoreFileCount() {
+ return storeFile_.size();
+ }
+ public String getStoreFile(int index) {
+ return storeFile_.get(index);
+ }
+ public Builder setStoreFile(
+ int index, String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStoreFileIsMutable();
+ storeFile_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addStoreFile(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStoreFileIsMutable();
+ storeFile_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllStoreFile(
+ java.lang.Iterable<String> values) {
+ ensureStoreFileIsMutable();
+ super.addAll(values, storeFile_);
+ onChanged();
+ return this;
+ }
+ public Builder clearStoreFile() {
+ storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+ void addStoreFile(com.google.protobuf.ByteString value) {
+ ensureStoreFileIsMutable();
+ storeFile_.add(value);
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetStoreFileResponse)
+ }
+
+ static {
+ defaultInstance = new GetStoreFileResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetStoreFileResponse)
+ }
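+
+ // Illustrative sketch (editorial note, not emitted by protoc): consuming a
+ // GetStoreFileResponse, whose only field is the repeated string 'storeFile'.
+ // 'inputStream' is a hypothetical java.io.InputStream carrying the message.
+ //
+ //   GetStoreFileResponse response = GetStoreFileResponse.parseFrom(inputStream);
+ //   for (String path : response.getStoreFileList()) {
+ //     System.out.println(path);
+ //   }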
+
+ public interface GetOnlineRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class GetOnlineRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetOnlineRegionRequestOrBuilder {
+ // Use GetOnlineRegionRequest.newBuilder() to construct.
+ private GetOnlineRegionRequest(Builder builder) {
+ super(builder);
+ }
+ private GetOnlineRegionRequest(boolean noInit) {}
+
+ private static final GetOnlineRegionRequest defaultInstance;
+ public static GetOnlineRegionRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetOnlineRegionRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest)
+ }
+
+ static {
+ defaultInstance = new GetOnlineRegionRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest)
+ }
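+
+ // Illustrative sketch (editorial note, not emitted by protoc):
+ // GetOnlineRegionRequest declares no fields, so the shared default instance
+ // is sufficient wherever a request message is needed:
+ //
+ //   GetOnlineRegionRequest request = GetOnlineRegionRequest.getDefaultInstance();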
+
+ public interface GetOnlineRegionResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .RegionInfo regionInfo = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>
+ getRegionInfoList();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
+ int getRegionInfoCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index);
+ }
+ public static final class GetOnlineRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetOnlineRegionResponseOrBuilder {
+ // Use GetOnlineRegionResponse.newBuilder() to construct.
+ private GetOnlineRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private GetOnlineRegionResponse(boolean noInit) {}
+
+ private static final GetOnlineRegionResponse defaultInstance;
+ public static GetOnlineRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetOnlineRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable;
+ }
+
+ // repeated .RegionInfo regionInfo = 1;
+ public static final int REGIONINFO_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
+ return regionInfo_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList() {
+ return regionInfo_;
+ }
+ public int getRegionInfoCount() {
+ return regionInfo_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
+ return regionInfo_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index) {
+ return regionInfo_.get(index);
+ }
+
+ private void initFields() {
+ regionInfo_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getRegionInfoCount(); i++) {
+ if (!getRegionInfo(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < regionInfo_.size(); i++) {
+ output.writeMessage(1, regionInfo_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < regionInfo_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, regionInfo_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj;
+
+ boolean result = true;
+ result = result && getRegionInfoList()
+ .equals(other.getRegionInfoList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getRegionInfoCount() > 0) {
+ hash = (37 * hash) + REGIONINFO_FIELD_NUMBER;
+ hash = (53 * hash) + getRegionInfoList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ regionInfoBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (regionInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.regionInfo_ = regionInfo_;
+ } else {
+ result.regionInfo_ = regionInfoBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this;
+ if (regionInfoBuilder_ == null) {
+ if (!other.regionInfo_.isEmpty()) {
+ if (regionInfo_.isEmpty()) {
+ regionInfo_ = other.regionInfo_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureRegionInfoIsMutable();
+ regionInfo_.addAll(other.regionInfo_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.regionInfo_.isEmpty()) {
+ if (regionInfoBuilder_.isEmpty()) {
+ regionInfoBuilder_.dispose();
+ regionInfoBuilder_ = null;
+ regionInfo_ = other.regionInfo_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ regionInfoBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getRegionInfoFieldBuilder() : null;
+ } else {
+ regionInfoBuilder_.addAllMessages(other.regionInfo_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getRegionInfoCount(); i++) {
+ if (!getRegionInfo(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addRegionInfo(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .RegionInfo regionInfo = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
+ java.util.Collections.emptyList();
+ private void ensureRegionInfoIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
+ if (regionInfoBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(regionInfo_);
+ } else {
+ return regionInfoBuilder_.getMessageList();
+ }
+ }
+ public int getRegionInfoCount() {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_.size();
+ } else {
+ return regionInfoBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_.get(index);
+ } else {
+ return regionInfoBuilder_.getMessage(index);
+ }
+ }
+ public Builder setRegionInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionInfoIsMutable();
+ regionInfo_.set(index, value);
+ onChanged();
+ } else {
+ regionInfoBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setRegionInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ regionInfoBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(value);
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addRegionInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(index, value);
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addRegionInfo(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(builderForValue.build());
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addRegionInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllRegionInfo(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> values) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ super.addAll(values, regionInfo_);
+ onChanged();
+ } else {
+ regionInfoBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearRegionInfo() {
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ regionInfoBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removeRegionInfo(int index) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.remove(index);
+ onChanged();
+ } else {
+ regionInfoBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
+ int index) {
+ return getRegionInfoFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index) {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_.get(index); } else {
+ return regionInfoBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList() {
+ if (regionInfoBuilder_ != null) {
+ return regionInfoBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(regionInfo_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
+ return getRegionInfoFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
+ int index) {
+ return getRegionInfoFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder>
+ getRegionInfoBuilderList() {
+ return getRegionInfoFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoFieldBuilder() {
+ if (regionInfoBuilder_ == null) {
+ regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
+ regionInfo_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ regionInfo_ = null;
+ }
+ return regionInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse)
+ }
+
+ static {
+ defaultInstance = new GetOnlineRegionResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse)
+ }
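+
+ // A minimal usage sketch, assuming HBaseProtos.RegionInfo values hri1 and
+ // hri2 obtained elsewhere; the repeated regionInfo field is populated via
+ // addRegionInfo(...) on the builder and read back through the list accessors:
+ //
+ //   GetOnlineRegionResponse response = GetOnlineRegionResponse.newBuilder()
+ //       .addRegionInfo(hri1)
+ //       .addRegionInfo(hri2)
+ //       .build();
+ //   int count = response.getRegionInfoCount();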
+
+ public interface OpenRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>
+ getOpenInfoList();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index);
+ int getOpenInfoCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>
+ getOpenInfoOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
+ int index);
+ }
+ public static final class OpenRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements OpenRegionRequestOrBuilder {
+ // Use OpenRegionRequest.newBuilder() to construct.
+ private OpenRegionRequest(Builder builder) {
+ super(builder);
+ }
+ private OpenRegionRequest(boolean noInit) {}
+
+ private static final OpenRegionRequest defaultInstance;
+ public static OpenRegionRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public OpenRegionRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable;
+ }
+
+ public interface RegionOpenInfoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionInfo region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder();
+
+ // optional uint32 versionOfOfflineNode = 2;
+ boolean hasVersionOfOfflineNode();
+ int getVersionOfOfflineNode();
+ }
+ public static final class RegionOpenInfo extends
+ com.google.protobuf.GeneratedMessage
+ implements RegionOpenInfoOrBuilder {
+ // Use RegionOpenInfo.newBuilder() to construct.
+ private RegionOpenInfo(Builder builder) {
+ super(builder);
+ }
+ private RegionOpenInfo(boolean noInit) {}
+
+ private static final RegionOpenInfo defaultInstance;
+ public static RegionOpenInfo getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RegionOpenInfo getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionInfo region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // optional uint32 versionOfOfflineNode = 2;
+ public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2;
+ private int versionOfOfflineNode_;
+ public boolean hasVersionOfOfflineNode() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getVersionOfOfflineNode() {
+ return versionOfOfflineNode_;
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ versionOfOfflineNode_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt32(2, versionOfOfflineNode_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(2, versionOfOfflineNode_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode());
+ if (hasVersionOfOfflineNode()) {
+ result = result && (getVersionOfOfflineNode()
+ == other.getVersionOfOfflineNode());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (hasVersionOfOfflineNode()) {
+ hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER;
+ hash = (53 * hash) + getVersionOfOfflineNode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ versionOfOfflineNode_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.versionOfOfflineNode_ = versionOfOfflineNode_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (other.hasVersionOfOfflineNode()) {
+ setVersionOfOfflineNode(other.getVersionOfOfflineNode());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ versionOfOfflineNode_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionInfo region = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // optional uint32 versionOfOfflineNode = 2;
+ private int versionOfOfflineNode_ ;
+ public boolean hasVersionOfOfflineNode() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getVersionOfOfflineNode() {
+ return versionOfOfflineNode_;
+ }
+ public Builder setVersionOfOfflineNode(int value) {
+ bitField0_ |= 0x00000002;
+ versionOfOfflineNode_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearVersionOfOfflineNode() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ versionOfOfflineNode_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:OpenRegionRequest.RegionOpenInfo)
+ }
+
+ static {
+ defaultInstance = new RegionOpenInfo(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:OpenRegionRequest.RegionOpenInfo)
+ }
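+
+ // A minimal usage sketch, assuming a HBaseProtos.RegionInfo value hri built
+ // elsewhere. region is a required field, so build() throws an
+ // UninitializedMessageException if it is unset; versionOfOfflineNode is
+ // optional and defaults to 0:
+ //
+ //   RegionOpenInfo info = RegionOpenInfo.newBuilder()
+ //       .setRegion(hri)
+ //       .setVersionOfOfflineNode(3)
+ //       .build();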
+
+ // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+ public static final int OPENINFO_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() {
+ return openInfo_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>
+ getOpenInfoOrBuilderList() {
+ return openInfo_;
+ }
+ public int getOpenInfoCount() {
+ return openInfo_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) {
+ return openInfo_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
+ int index) {
+ return openInfo_.get(index);
+ }
+
+ private void initFields() {
+ openInfo_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getOpenInfoCount(); i++) {
+ if (!getOpenInfo(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < openInfo_.size(); i++) {
+ output.writeMessage(1, openInfo_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < openInfo_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, openInfo_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj;
+
+ boolean result = true;
+ result = result && getOpenInfoList()
+ .equals(other.getOpenInfoList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getOpenInfoCount() > 0) {
+ hash = (37 * hash) + OPENINFO_FIELD_NUMBER;
+ hash = (53 * hash) + getOpenInfoList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getOpenInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (openInfoBuilder_ == null) {
+ openInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ openInfoBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this);
+ int from_bitField0_ = bitField0_;
+ if (openInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ openInfo_ = java.util.Collections.unmodifiableList(openInfo_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.openInfo_ = openInfo_;
+ } else {
+ result.openInfo_ = openInfoBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this;
+ if (openInfoBuilder_ == null) {
+ if (!other.openInfo_.isEmpty()) {
+ if (openInfo_.isEmpty()) {
+ openInfo_ = other.openInfo_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureOpenInfoIsMutable();
+ openInfo_.addAll(other.openInfo_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.openInfo_.isEmpty()) {
+ if (openInfoBuilder_.isEmpty()) {
+ openInfoBuilder_.dispose();
+ openInfoBuilder_ = null;
+ openInfo_ = other.openInfo_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ openInfoBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getOpenInfoFieldBuilder() : null;
+ } else {
+ openInfoBuilder_.addAllMessages(other.openInfo_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getOpenInfoCount(); i++) {
+ if (!getOpenInfo(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addOpenInfo(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_ =
+ java.util.Collections.emptyList();
+ private void ensureOpenInfoIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ openInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>(openInfo_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() {
+ if (openInfoBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(openInfo_);
+ } else {
+ return openInfoBuilder_.getMessageList();
+ }
+ }
+ public int getOpenInfoCount() {
+ if (openInfoBuilder_ == null) {
+ return openInfo_.size();
+ } else {
+ return openInfoBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) {
+ if (openInfoBuilder_ == null) {
+ return openInfo_.get(index);
+ } else {
+ return openInfoBuilder_.getMessage(index);
+ }
+ }
+ public Builder setOpenInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) {
+ if (openInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureOpenInfoIsMutable();
+ openInfo_.set(index, value);
+ onChanged();
+ } else {
+ openInfoBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setOpenInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) {
+ if (openInfoBuilder_ == null) {
+ ensureOpenInfoIsMutable();
+ openInfo_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ openInfoBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) {
+ if (openInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureOpenInfoIsMutable();
+ openInfo_.add(value);
+ onChanged();
+ } else {
+ openInfoBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addOpenInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) {
+ if (openInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureOpenInfoIsMutable();
+ openInfo_.add(index, value);
+ onChanged();
+ } else {
+ openInfoBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addOpenInfo(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) {
+ if (openInfoBuilder_ == null) {
+ ensureOpenInfoIsMutable();
+ openInfo_.add(builderForValue.build());
+ onChanged();
+ } else {
+ openInfoBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addOpenInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) {
+ if (openInfoBuilder_ == null) {
+ ensureOpenInfoIsMutable();
+ openInfo_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ openInfoBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllOpenInfo(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> values) {
+ if (openInfoBuilder_ == null) {
+ ensureOpenInfoIsMutable();
+ super.addAll(values, openInfo_);
+ onChanged();
+ } else {
+ openInfoBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearOpenInfo() {
+ if (openInfoBuilder_ == null) {
+ openInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ openInfoBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removeOpenInfo(int index) {
+ if (openInfoBuilder_ == null) {
+ ensureOpenInfoIsMutable();
+ openInfo_.remove(index);
+ onChanged();
+ } else {
+ openInfoBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder getOpenInfoBuilder(
+ int index) {
+ return getOpenInfoFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder(
+ int index) {
+ if (openInfoBuilder_ == null) {
+ return openInfo_.get(index); } else {
+ return openInfoBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>
+ getOpenInfoOrBuilderList() {
+ if (openInfoBuilder_ != null) {
+ return openInfoBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(openInfo_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder() {
+ return getOpenInfoFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder(
+ int index) {
+ return getOpenInfoFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder>
+ getOpenInfoBuilderList() {
+ return getOpenInfoFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>
+ getOpenInfoFieldBuilder() {
+ if (openInfoBuilder_ == null) {
+ openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>(
+ openInfo_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ openInfo_ = null;
+ }
+ return openInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:OpenRegionRequest)
+ }
+
+ static {
+ defaultInstance = new OpenRegionRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:OpenRegionRequest)
+ }
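+
+ // A minimal usage sketch, assuming RegionOpenInfo values info1 and info2
+ // built as above; addOpenInfo(...) also accepts a RegionOpenInfo.Builder,
+ // and addAllOpenInfo(...) accepts any Iterable of RegionOpenInfo:
+ //
+ //   OpenRegionRequest request = OpenRegionRequest.newBuilder()
+ //       .addOpenInfo(info1)
+ //       .addOpenInfo(info2)
+ //       .build();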
+
+ public interface OpenRegionResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList();
+ int getOpeningStateCount();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index);
+ }
+ public static final class OpenRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements OpenRegionResponseOrBuilder {
+ // Use OpenRegionResponse.newBuilder() to construct.
+ private OpenRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private OpenRegionResponse(boolean noInit) {}
+
+ private static final OpenRegionResponse defaultInstance;
+ public static OpenRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public OpenRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable;
+ }
+
+ public enum RegionOpeningState
+ implements com.google.protobuf.ProtocolMessageEnum {
+ OPENED(0, 0),
+ ALREADY_OPENED(1, 1),
+ FAILED_OPENING(2, 2),
+ ;
+
+ public static final int OPENED_VALUE = 0;
+ public static final int ALREADY_OPENED_VALUE = 1;
+ public static final int FAILED_OPENING_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static RegionOpeningState valueOf(int value) {
+ switch (value) {
+ case 0: return OPENED;
+ case 1: return ALREADY_OPENED;
+ case 2: return FAILED_OPENING;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>() {
+ public RegionOpeningState findValueByNumber(int number) {
+ return RegionOpeningState.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final RegionOpeningState[] VALUES = {
+ OPENED, ALREADY_OPENED, FAILED_OPENING,
+ };
+
+ public static RegionOpeningState valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private RegionOpeningState(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState)
+ }
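+
+ // A sketch of decoding a raw wire value, mirroring the case 8 branch of
+ // Builder.mergeFrom(...) below: valueOf(int) returns null for numbers with
+ // no matching constant, which lets the parser route such values into the
+ // unknown field set instead of failing:
+ //
+ //   RegionOpeningState state = RegionOpeningState.valueOf(rawValue);
+ //   if (state == null) { /* preserve rawValue as an unknown varint field */ }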
+
+ // repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
+ public static final int OPENINGSTATE_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
+ return openingState_;
+ }
+ public int getOpeningStateCount() {
+ return openingState_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
+ return openingState_.get(index);
+ }
+
+ private void initFields() {
+ openingState_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < openingState_.size(); i++) {
+ output.writeEnum(1, openingState_.get(i).getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < openingState_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeEnumSizeNoTag(openingState_.get(i).getNumber());
+ }
+ size += dataSize;
+ size += 1 * openingState_.size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj;
+
+ boolean result = true;
+ result = result && getOpeningStateList()
+ .equals(other.getOpeningStateList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getOpeningStateCount() > 0) {
+ hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnumList(getOpeningStateList());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ openingState_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ openingState_ = java.util.Collections.unmodifiableList(openingState_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.openingState_ = openingState_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this;
+ if (!other.openingState_.isEmpty()) {
+ if (openingState_.isEmpty()) {
+ openingState_ = other.openingState_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureOpeningStateIsMutable();
+ openingState_.addAll(other.openingState_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
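+ // Note (added commentary, not protoc output): a repeated enum may arrive
+ // unpacked (tag 8, one varint per element) or packed (tag 10, a
+ // length-delimited run of varints). Both encodings are handled below, and
+ // enum numbers this build does not recognize are preserved in unknownFields
+ // rather than dropped.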
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ addOpeningState(value);
+ }
+ break;
+ }
+ case 10: {
+ int length = input.readRawVarint32();
+ int oldLimit = input.pushLimit(length);
+ while(input.getBytesUntilLimit() > 0) {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ addOpeningState(value);
+ }
+ }
+ input.popLimit(oldLimit);
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_ =
+ java.util.Collections.emptyList();
+ private void ensureOpeningStateIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
+ return java.util.Collections.unmodifiableList(openingState_);
+ }
+ public int getOpeningStateCount() {
+ return openingState_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
+ return openingState_.get(index);
+ }
+ public Builder setOpeningState(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureOpeningStateIsMutable();
+ openingState_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureOpeningStateIsMutable();
+ openingState_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllOpeningState(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> values) {
+ ensureOpeningStateIsMutable();
+ super.addAll(values, openingState_);
+ onChanged();
+ return this;
+ }
+ public Builder clearOpeningState() {
+ openingState_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:OpenRegionResponse)
+ }
+
+ static {
+ defaultInstance = new OpenRegionResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:OpenRegionResponse)
+ }
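+
+ // Usage sketch (illustrative, not protoc output): assembling and reading an
+ // OpenRegionResponse with the builder above. RegionOpeningState.OPENED is
+ // assumed to be one of the enum constants declared on this message.
+ //
+ //   OpenRegionResponse resp = OpenRegionResponse.newBuilder()
+ //       .addOpeningState(OpenRegionResponse.RegionOpeningState.OPENED)
+ //       .build();
+ //   // getOpeningStateList() returns an unmodifiable view of the states.
+ //   int n = resp.getOpeningStateCount();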
+
+ public interface CloseRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionSpecifier region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+ // optional uint32 versionOfClosingNode = 2;
+ boolean hasVersionOfClosingNode();
+ int getVersionOfClosingNode();
+
+ // optional bool transitionInZK = 3 [default = true];
+ boolean hasTransitionInZK();
+ boolean getTransitionInZK();
+
+ // optional .ServerName destinationServer = 4;
+ boolean hasDestinationServer();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder();
+ }
+ public static final class CloseRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements CloseRegionRequestOrBuilder {
+ // Use CloseRegionRequest.newBuilder() to construct.
+ private CloseRegionRequest(Builder builder) {
+ super(builder);
+ }
+ private CloseRegionRequest(boolean noInit) {}
+
+ private static final CloseRegionRequest defaultInstance;
+ public static CloseRegionRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CloseRegionRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionSpecifier region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // optional uint32 versionOfClosingNode = 2;
+ public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2;
+ private int versionOfClosingNode_;
+ public boolean hasVersionOfClosingNode() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getVersionOfClosingNode() {
+ return versionOfClosingNode_;
+ }
+
+ // optional bool transitionInZK = 3 [default = true];
+ public static final int TRANSITIONINZK_FIELD_NUMBER = 3;
+ private boolean transitionInZK_;
+ public boolean hasTransitionInZK() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public boolean getTransitionInZK() {
+ return transitionInZK_;
+ }
+
+ // optional .ServerName destinationServer = 4;
+ public static final int DESTINATIONSERVER_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_;
+ public boolean hasDestinationServer() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() {
+ return destinationServer_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() {
+ return destinationServer_;
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ versionOfClosingNode_ = 0;
+ transitionInZK_ = true;
+ destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasDestinationServer()) {
+ if (!getDestinationServer().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt32(2, versionOfClosingNode_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBool(3, transitionInZK_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeMessage(4, destinationServer_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(2, versionOfClosingNode_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(3, transitionInZK_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, destinationServer_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode());
+ if (hasVersionOfClosingNode()) {
+ result = result && (getVersionOfClosingNode()
+ == other.getVersionOfClosingNode());
+ }
+ result = result && (hasTransitionInZK() == other.hasTransitionInZK());
+ if (hasTransitionInZK()) {
+ result = result && (getTransitionInZK()
+ == other.getTransitionInZK());
+ }
+ result = result && (hasDestinationServer() == other.hasDestinationServer());
+ if (hasDestinationServer()) {
+ result = result && getDestinationServer()
+ .equals(other.getDestinationServer());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (hasVersionOfClosingNode()) {
+ hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER;
+ hash = (53 * hash) + getVersionOfClosingNode();
+ }
+ if (hasTransitionInZK()) {
+ hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getTransitionInZK());
+ }
+ if (hasDestinationServer()) {
+ hash = (37 * hash) + DESTINATIONSERVER_FIELD_NUMBER;
+ hash = (53 * hash) + getDestinationServer().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ getDestinationServerFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ versionOfClosingNode_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ transitionInZK_ = true;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (destinationServerBuilder_ == null) {
+ destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ } else {
+ destinationServerBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.versionOfClosingNode_ = versionOfClosingNode_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.transitionInZK_ = transitionInZK_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ if (destinationServerBuilder_ == null) {
+ result.destinationServer_ = destinationServer_;
+ } else {
+ result.destinationServer_ = destinationServerBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (other.hasVersionOfClosingNode()) {
+ setVersionOfClosingNode(other.getVersionOfClosingNode());
+ }
+ if (other.hasTransitionInZK()) {
+ setTransitionInZK(other.getTransitionInZK());
+ }
+ if (other.hasDestinationServer()) {
+ mergeDestinationServer(other.getDestinationServer());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+
+ return false;
+ }
+ if (hasDestinationServer()) {
+ if (!getDestinationServer().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ versionOfClosingNode_ = input.readUInt32();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ transitionInZK_ = input.readBool();
+ break;
+ }
+ case 34: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
+ if (hasDestinationServer()) {
+ subBuilder.mergeFrom(getDestinationServer());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setDestinationServer(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionSpecifier region = 1;
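+ // Note (added commentary, not protoc output): region_ holds the plain
+ // message until a nested builder is first requested; getRegionFieldBuilder()
+ // then moves ownership into regionBuilder_ and nulls region_, so exactly one
+ // of the two is live at any time.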
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // optional uint32 versionOfClosingNode = 2;
+ private int versionOfClosingNode_ ;
+ public boolean hasVersionOfClosingNode() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getVersionOfClosingNode() {
+ return versionOfClosingNode_;
+ }
+ public Builder setVersionOfClosingNode(int value) {
+ bitField0_ |= 0x00000002;
+ versionOfClosingNode_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearVersionOfClosingNode() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ versionOfClosingNode_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional bool transitionInZK = 3 [default = true];
+ private boolean transitionInZK_ = true;
+ public boolean hasTransitionInZK() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public boolean getTransitionInZK() {
+ return transitionInZK_;
+ }
+ public Builder setTransitionInZK(boolean value) {
+ bitField0_ |= 0x00000004;
+ transitionInZK_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearTransitionInZK() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ transitionInZK_ = true;
+ onChanged();
+ return this;
+ }
+
+ // optional .ServerName destinationServer = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_;
+ public boolean hasDestinationServer() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() {
+ if (destinationServerBuilder_ == null) {
+ return destinationServer_;
+ } else {
+ return destinationServerBuilder_.getMessage();
+ }
+ }
+ public Builder setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+ if (destinationServerBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ destinationServer_ = value;
+ onChanged();
+ } else {
+ destinationServerBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ public Builder setDestinationServer(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
+ if (destinationServerBuilder_ == null) {
+ destinationServer_ = builderForValue.build();
+ onChanged();
+ } else {
+ destinationServerBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ public Builder mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+ if (destinationServerBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008) &&
+ destinationServer_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
+ destinationServer_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destinationServer_).mergeFrom(value).buildPartial();
+ } else {
+ destinationServer_ = value;
+ }
+ onChanged();
+ } else {
+ destinationServerBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ public Builder clearDestinationServer() {
+ if (destinationServerBuilder_ == null) {
+ destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ onChanged();
+ } else {
+ destinationServerBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestinationServerBuilder() {
+ bitField0_ |= 0x00000008;
+ onChanged();
+ return getDestinationServerFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() {
+ if (destinationServerBuilder_ != null) {
+ return destinationServerBuilder_.getMessageOrBuilder();
+ } else {
+ return destinationServer_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
+ getDestinationServerFieldBuilder() {
+ if (destinationServerBuilder_ == null) {
+ destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
+ destinationServer_,
+ getParentForChildren(),
+ isClean());
+ destinationServer_ = null;
+ }
+ return destinationServerBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:CloseRegionRequest)
+ }
+
+ static {
+ defaultInstance = new CloseRegionRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:CloseRegionRequest)
+ }
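+
+ // Usage sketch (illustrative, not protoc output): only the region field is
+ // required, and transitionInZK defaults to true. The RegionSpecifier
+ // construction is an assumption about HBaseProtos (a required type/value
+ // pair), and "someRegion" is a made-up region name.
+ //
+ //   HBaseProtos.RegionSpecifier region = HBaseProtos.RegionSpecifier.newBuilder()
+ //       .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
+ //       .setValue(com.google.protobuf.ByteString.copyFromUtf8("someRegion"))
+ //       .build();
+ //   CloseRegionRequest req = CloseRegionRequest.newBuilder()
+ //       .setRegion(region)
+ //       .build();                    // throws if the required region is unset
+ //   assert req.getTransitionInZK(); // optional field 3, default true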
+
+ public interface CloseRegionResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bool closed = 1;
+ boolean hasClosed();
+ boolean getClosed();
+ }
+ public static final class CloseRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements CloseRegionResponseOrBuilder {
+ // Use CloseRegionResponse.newBuilder() to construct.
+ private CloseRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private CloseRegionResponse(boolean noInit) {}
+
+ private static final CloseRegionResponse defaultInstance;
+ public static CloseRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CloseRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bool closed = 1;
+ public static final int CLOSED_FIELD_NUMBER = 1;
+ private boolean closed_;
+ public boolean hasClosed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public boolean getClosed() {
+ return closed_;
+ }
+
+ private void initFields() {
+ closed_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasClosed()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, closed_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, closed_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj;
+
+ boolean result = true;
+ result = result && (hasClosed() == other.hasClosed());
+ if (hasClosed()) {
+ result = result && (getClosed()
+ == other.getClosed());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasClosed()) {
+ hash = (37 * hash) + CLOSED_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getClosed());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ closed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.closed_ = closed_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this;
+ if (other.hasClosed()) {
+ setClosed(other.getClosed());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasClosed()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ closed_ = input.readBool();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required bool closed = 1;
+ private boolean closed_ ;
+ public boolean hasClosed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public boolean getClosed() {
+ return closed_;
+ }
+ public Builder setClosed(boolean value) {
+ bitField0_ |= 0x00000001;
+ closed_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearClosed() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ closed_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:CloseRegionResponse)
+ }
+
+ static {
+ defaultInstance = new CloseRegionResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:CloseRegionResponse)
+ }
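+
+ // Usage sketch (illustrative, not protoc output): CloseRegionResponse carries
+ // a single required bool, so a wire round trip through the parseFrom
+ // overloads above is short.
+ //
+ //   CloseRegionResponse resp = CloseRegionResponse.newBuilder()
+ //       .setClosed(true)
+ //       .build();
+ //   com.google.protobuf.ByteString bytes = resp.toByteString();
+ //   CloseRegionResponse copy = CloseRegionResponse.parseFrom(bytes);
+ //   assert copy.getClosed() && copy.equals(resp);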
+
+ public interface FlushRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionSpecifier region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+ // optional uint64 ifOlderThanTs = 2;
+ boolean hasIfOlderThanTs();
+ long getIfOlderThanTs();
+ }
+ public static final class FlushRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements FlushRegionRequestOrBuilder {
+ // Use FlushRegionRequest.newBuilder() to construct.
+ private FlushRegionRequest(Builder builder) {
+ super(builder);
+ }
+ private FlushRegionRequest(boolean noInit) {}
+
+ private static final FlushRegionRequest defaultInstance;
+ public static FlushRegionRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FlushRegionRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionSpecifier region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // optional uint64 ifOlderThanTs = 2;
+ public static final int IFOLDERTHANTS_FIELD_NUMBER = 2;
+ private long ifOlderThanTs_;
+ public boolean hasIfOlderThanTs() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getIfOlderThanTs() {
+ return ifOlderThanTs_;
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ ifOlderThanTs_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, ifOlderThanTs_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, ifOlderThanTs_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs());
+ if (hasIfOlderThanTs()) {
+ result = result && (getIfOlderThanTs()
+ == other.getIfOlderThanTs());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (hasIfOlderThanTs()) {
+ hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getIfOlderThanTs());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ ifOlderThanTs_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.ifOlderThanTs_ = ifOlderThanTs_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (other.hasIfOlderThanTs()) {
+ setIfOlderThanTs(other.getIfOlderThanTs());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ ifOlderThanTs_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ }
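+ // Note on the parse loop above: a protobuf tag packs
+ // (field_number << 3) | wire_type, so case 10 is field 1 with wire type 2
+ // (length-delimited: the nested RegionSpecifier), case 16 is field 2 with
+ // wire type 0 (varint: the uint64 ifOlderThanTs), and tag 0 means end of
+ // input. Unrecognized tags are kept via parseUnknownField so data written
+ // by a newer schema revision survives a round trip.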
+
+ private int bitField0_;
+
+ // required .RegionSpecifier region = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
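+ // Once created, the SingleFieldBuilder takes ownership of the nested
+ // message (region_ is nulled out); subsequent reads and writes go through
+ // the builder, which propagates invalidation to this parent builder.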
+
+ // optional uint64 ifOlderThanTs = 2;
+ private long ifOlderThanTs_ ;
+ public boolean hasIfOlderThanTs() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getIfOlderThanTs() {
+ return ifOlderThanTs_;
+ }
+ public Builder setIfOlderThanTs(long value) {
+ bitField0_ |= 0x00000002;
+ ifOlderThanTs_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearIfOlderThanTs() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ ifOlderThanTs_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:FlushRegionRequest)
+ }
+
+ static {
+ defaultInstance = new FlushRegionRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:FlushRegionRequest)
+ }
+
+ public interface FlushRegionResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 lastFlushTime = 1;
+ boolean hasLastFlushTime();
+ long getLastFlushTime();
+
+ // optional bool flushed = 2;
+ boolean hasFlushed();
+ boolean getFlushed();
+ }
+ public static final class FlushRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements FlushRegionResponseOrBuilder {
+ // Use FlushRegionResponse.newBuilder() to construct.
+ private FlushRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private FlushRegionResponse(boolean noInit) {}
+
+ private static final FlushRegionResponse defaultInstance;
+ public static FlushRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FlushRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required uint64 lastFlushTime = 1;
+ public static final int LASTFLUSHTIME_FIELD_NUMBER = 1;
+ private long lastFlushTime_;
+ public boolean hasLastFlushTime() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getLastFlushTime() {
+ return lastFlushTime_;
+ }
+
+ // optional bool flushed = 2;
+ public static final int FLUSHED_FIELD_NUMBER = 2;
+ private boolean flushed_;
+ public boolean hasFlushed() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public boolean getFlushed() {
+ return flushed_;
+ }
+
+ private void initFields() {
+ lastFlushTime_ = 0L;
+ flushed_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasLastFlushTime()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
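+ // memoizedIsInitialized caches the required-field check as a tri-state
+ // byte: -1 not yet computed, 0 uninitialized, 1 initialized. Only the
+ // required lastFlushTime field can make this message uninitialized.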
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, lastFlushTime_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBool(2, flushed_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, lastFlushTime_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(2, flushed_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
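+ // getSerializedSize() memoizes its result, and writeTo() invokes it first
+ // so the size is computed once before any bytes are emitted. Both consult
+ // the same bitField0_ has-bits, so an unset optional field costs zero
+ // bytes on the wire.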
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj;
+
+ boolean result = true;
+ result = result && (hasLastFlushTime() == other.hasLastFlushTime());
+ if (hasLastFlushTime()) {
+ result = result && (getLastFlushTime()
+ == other.getLastFlushTime());
+ }
+ result = result && (hasFlushed() == other.hasFlushed());
+ if (hasFlushed()) {
+ result = result && (getFlushed()
+ == other.getFlushed());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasLastFlushTime()) {
+ hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLastFlushTime());
+ }
+ if (hasFlushed()) {
+ hash = (37 * hash) + FLUSHED_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getFlushed());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
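+ // equals()/hashCode() implement value semantics: two instances match only
+ // when field presence, field values, and unknown fields all agree, and
+ // hashCode() mixes each set field's number with its value (hashLong folds
+ // the uint64 into an int) so it stays consistent with equals().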
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
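+ // Every static parseFrom()/parseDelimitedFrom() overload above funnels
+ // through newBuilder().mergeFrom(...).buildParsed(); the delimited variants
+ // read a varint length prefix first and return null on a clean end of
+ // stream instead of throwing.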
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ lastFlushTime_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ flushed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.lastFlushTime_ = lastFlushTime_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.flushed_ = flushed_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this;
+ if (other.hasLastFlushTime()) {
+ setLastFlushTime(other.getLastFlushTime());
+ }
+ if (other.hasFlushed()) {
+ setFlushed(other.getFlushed());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasLastFlushTime()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ lastFlushTime_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ flushed_ = input.readBool();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required uint64 lastFlushTime = 1;
+ private long lastFlushTime_ ;
+ public boolean hasLastFlushTime() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getLastFlushTime() {
+ return lastFlushTime_;
+ }
+ public Builder setLastFlushTime(long value) {
+ bitField0_ |= 0x00000001;
+ lastFlushTime_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearLastFlushTime() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ lastFlushTime_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional bool flushed = 2;
+ private boolean flushed_ ;
+ public boolean hasFlushed() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public boolean getFlushed() {
+ return flushed_;
+ }
+ public Builder setFlushed(boolean value) {
+ bitField0_ |= 0x00000002;
+ flushed_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearFlushed() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ flushed_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:FlushRegionResponse)
+ }
+
+ static {
+ defaultInstance = new FlushRegionResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:FlushRegionResponse)
+ }
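+ // A minimal round-trip sketch for the FlushRegionResponse above
+ // (illustrative only; assumes a protobuf-java 2.4.x classpath, and the
+ // timestamp value is hypothetical):
+ //
+ //   FlushRegionResponse resp = FlushRegionResponse.newBuilder()
+ //       .setLastFlushTime(System.currentTimeMillis()) // required field
+ //       .setFlushed(true)                              // optional field
+ //       .build();                 // throws if lastFlushTime were unset
+ //   byte[] wire = resp.toByteArray();
+ //   FlushRegionResponse parsed = FlushRegionResponse.parseFrom(wire);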
+
+ public interface SplitRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionSpecifier region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+ // optional bytes splitPoint = 2;
+ boolean hasSplitPoint();
+ com.google.protobuf.ByteString getSplitPoint();
+ }
+ public static final class SplitRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements SplitRegionRequestOrBuilder {
+ // Use SplitRegionRequest.newBuilder() to construct.
+ private SplitRegionRequest(Builder builder) {
+ super(builder);
+ }
+ private SplitRegionRequest(boolean noInit) {}
+
+ private static final SplitRegionRequest defaultInstance;
+ public static SplitRegionRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SplitRegionRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionSpecifier region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // optional bytes splitPoint = 2;
+ public static final int SPLITPOINT_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString splitPoint_;
+ public boolean hasSplitPoint() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getSplitPoint() {
+ return splitPoint_;
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ splitPoint_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, splitPoint_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, splitPoint_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && (hasSplitPoint() == other.hasSplitPoint());
+ if (hasSplitPoint()) {
+ result = result && getSplitPoint()
+ .equals(other.getSplitPoint());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (hasSplitPoint()) {
+ hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER;
+ hash = (53 * hash) + getSplitPoint().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ splitPoint_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.splitPoint_ = splitPoint_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (other.hasSplitPoint()) {
+ setSplitPoint(other.getSplitPoint());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ splitPoint_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionSpecifier region = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // optional bytes splitPoint = 2;
+ private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasSplitPoint() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getSplitPoint() {
+ return splitPoint_;
+ }
+ public Builder setSplitPoint(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ splitPoint_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearSplitPoint() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ splitPoint_ = getDefaultInstance().getSplitPoint();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:SplitRegionRequest)
+ }
+
+ static {
+ defaultInstance = new SplitRegionRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:SplitRegionRequest)
+ }
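+ // A minimal construction sketch for the SplitRegionRequest above
+ // (illustrative only; "regionSpecifier" and the row key are hypothetical
+ // placeholders):
+ //
+ //   SplitRegionRequest req = SplitRegionRequest.newBuilder()
+ //       .setRegion(regionSpecifier)           // required RegionSpecifier
+ //       .setSplitPoint(com.google.protobuf.ByteString
+ //           .copyFromUtf8("row-0500"))        // optional explicit split point
+ //       .build();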
+
+ public interface SplitRegionResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
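+ // SplitRegionResponse declares no fields: it is a bare acknowledgement
+ // whose only possible payload is unknown fields from a newer peer, which
+ // the class below still parses, preserves, and re-serializes.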
+ public static final class SplitRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements SplitRegionResponseOrBuilder {
+ // Use SplitRegionResponse.newBuilder() to construct.
+ private SplitRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private SplitRegionResponse(boolean noInit) {}
+
+ private static final SplitRegionResponse defaultInstance;
+ public static SplitRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SplitRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:SplitRegionResponse)
+ }
+
+ static {
+ defaultInstance = new SplitRegionResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:SplitRegionResponse)
+ }
+
+ public interface CompactRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .RegionSpecifier region = 1;
+ boolean hasRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+ // optional bool major = 2;
+ boolean hasMajor();
+ boolean getMajor();
+
+ // optional bytes family = 3;
+ boolean hasFamily();
+ com.google.protobuf.ByteString getFamily();
+ }
+ public static final class CompactRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements CompactRegionRequestOrBuilder {
+ // Use CompactRegionRequest.newBuilder() to construct.
+ private CompactRegionRequest(Builder builder) {
+ super(builder);
+ }
+ private CompactRegionRequest(boolean noInit) {}
+
+ private static final CompactRegionRequest defaultInstance;
+ public static CompactRegionRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CompactRegionRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .RegionSpecifier region = 1;
+ public static final int REGION_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ return region_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ return region_;
+ }
+
+ // optional bool major = 2;
+ public static final int MAJOR_FIELD_NUMBER = 2;
+ private boolean major_;
+ public boolean hasMajor() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public boolean getMajor() {
+ return major_;
+ }
+
+ // optional bytes family = 3;
+ public static final int FAMILY_FIELD_NUMBER = 3;
+ private com.google.protobuf.ByteString family_;
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+
+ private void initFields() {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ major_ = false;
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ }
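+ // Defaults when unset: major is false and family is the empty ByteString
+ // (how an empty family is interpreted, e.g. as "all families", is up to
+ // the Admin service, not this message). Only the required region field
+ // feeds isInitialized() below.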
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRegion()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBool(2, major_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, family_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, region_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(2, major_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, family_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRegion() == other.hasRegion());
+ if (hasRegion()) {
+ result = result && getRegion()
+ .equals(other.getRegion());
+ }
+ result = result && (hasMajor() == other.hasMajor());
+ if (hasMajor()) {
+ result = result && (getMajor()
+ == other.getMajor());
+ }
+ result = result && (hasFamily() == other.hasFamily());
+ if (hasFamily()) {
+ result = result && getFamily()
+ .equals(other.getFamily());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRegion()) {
+ hash = (37 * hash) + REGION_FIELD_NUMBER;
+ hash = (53 * hash) + getRegion().hashCode();
+ }
+ if (hasMajor()) {
+ hash = (37 * hash) + MAJOR_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getMajor());
+ }
+ if (hasFamily()) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamily().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getRegionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ major_ = false;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (regionBuilder_ == null) {
+ result.region_ = region_;
+ } else {
+ result.region_ = regionBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.major_ = major_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.family_ = family_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this;
+ if (other.hasRegion()) {
+ mergeRegion(other.getRegion());
+ }
+ if (other.hasMajor()) {
+ setMajor(other.getMajor());
+ }
+ if (other.hasFamily()) {
+ setFamily(other.getFamily());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRegion()) {
+ // required field "region" is not set
+ return false;
+ }
+ if (!getRegion().isInitialized()) {
+ // required field "region" is set but not fully initialized
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasRegion()) {
+ subBuilder.mergeFrom(getRegion());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegion(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ major_ = input.readBool();
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ family_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .RegionSpecifier region = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+ public boolean hasRegion() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+ if (regionBuilder_ == null) {
+ return region_;
+ } else {
+ return regionBuilder_.getMessage();
+ }
+ }
+ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ region_ = value;
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setRegion(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+ if (regionBuilder_ == null) {
+ region_ = builderForValue.build();
+ onChanged();
+ } else {
+ regionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+ if (regionBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+ region_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
+ } else {
+ region_ = value;
+ }
+ onChanged();
+ } else {
+ regionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearRegion() {
+ if (regionBuilder_ == null) {
+ region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+ onChanged();
+ } else {
+ regionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getRegionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+ if (regionBuilder_ != null) {
+ return regionBuilder_.getMessageOrBuilder();
+ } else {
+ return region_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ getRegionFieldBuilder() {
+ if (regionBuilder_ == null) {
+ regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // optional bool major = 2;
+ private boolean major_ ;
+ public boolean hasMajor() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public boolean getMajor() {
+ return major_;
+ }
+ public Builder setMajor(boolean value) {
+ bitField0_ |= 0x00000002;
+ major_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMajor() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ major_ = false;
+ onChanged();
+ return this;
+ }
+
+ // optional bytes family = 3;
+ private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+ public Builder setFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ family_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearFamily() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ family_ = getDefaultInstance().getFamily();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:CompactRegionRequest)
+ }
+
+ static {
+ defaultInstance = new CompactRegionRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:CompactRegionRequest)
+ }
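+
+ // Editor's note (illustrative only, not part of the generated output): a
+ // minimal usage sketch for CompactRegionRequest. The regionSpecifier value
+ // and the "cf" family name below are hypothetical stand-ins.
+ //
+ // CompactRegionRequest request = CompactRegionRequest.newBuilder()
+ // .setRegion(regionSpecifier) // required; build() throws if unset
+ // .setMajor(true) // optional: ask for a major compaction
+ // .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf")) // optional
+ // .build();
+ // byte[] wire = request.toByteArray();
+ // CompactRegionRequest roundTrip = CompactRegionRequest.parseFrom(wire);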
+
+ public interface CompactRegionResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class CompactRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements CompactRegionResponseOrBuilder {
+ // Use CompactRegionResponse.newBuilder() to construct.
+ private CompactRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private CompactRegionResponse(boolean noInit) {}
+
+ private static final CompactRegionResponse defaultInstance;
+ public static CompactRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CompactRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ // @@protoc_insertion_point(builder_scope:CompactRegionResponse)
+ }
+
+ static {
+ defaultInstance = new CompactRegionResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:CompactRegionResponse)
+ }
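+
+ // Editor's note (illustrative only, not part of the generated output):
+ // CompactRegionResponse declares no fields and serves purely as an
+ // acknowledgement. One point worth noting from the code above: the
+ // parseDelimitedFrom(...) overloads return null on a clean end-of-stream
+ // instead of throwing, so callers should null-check:
+ //
+ // CompactRegionResponse ack = CompactRegionResponse.parseDelimitedFrom(in);
+ // if (ack == null) {
+ // // stream ended before a delimited message could be read
+ // }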
+
+ public interface UUIDOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 leastSigBits = 1;
+ boolean hasLeastSigBits();
+ long getLeastSigBits();
+
+ // required uint64 mostSigBits = 2;
+ boolean hasMostSigBits();
+ long getMostSigBits();
+ }
+ public static final class UUID extends
+ com.google.protobuf.GeneratedMessage
+ implements UUIDOrBuilder {
+ // Use UUID.newBuilder() to construct.
+ private UUID(Builder builder) {
+ super(builder);
+ }
+ private UUID(boolean noInit) {}
+
+ private static final UUID defaultInstance;
+ public static UUID getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public UUID getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required uint64 leastSigBits = 1;
+ public static final int LEASTSIGBITS_FIELD_NUMBER = 1;
+ private long leastSigBits_;
+ public boolean hasLeastSigBits() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getLeastSigBits() {
+ return leastSigBits_;
+ }
+
+ // required uint64 mostSigBits = 2;
+ public static final int MOSTSIGBITS_FIELD_NUMBER = 2;
+ private long mostSigBits_;
+ public boolean hasMostSigBits() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getMostSigBits() {
+ return mostSigBits_;
+ }
+
+ private void initFields() {
+ leastSigBits_ = 0L;
+ mostSigBits_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasLeastSigBits()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasMostSigBits()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, leastSigBits_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, mostSigBits_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, leastSigBits_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, mostSigBits_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj;
+
+ boolean result = true;
+ result = result && (hasLeastSigBits() == other.hasLeastSigBits());
+ if (hasLeastSigBits()) {
+ result = result && (getLeastSigBits()
+ == other.getLeastSigBits());
+ }
+ result = result && (hasMostSigBits() == other.hasMostSigBits());
+ if (hasMostSigBits()) {
+ result = result && (getMostSigBits()
+ == other.getMostSigBits());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasLeastSigBits()) {
+ hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLeastSigBits());
+ }
+ if (hasMostSigBits()) {
+ hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getMostSigBits());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ leastSigBits_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ mostSigBits_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.leastSigBits_ = leastSigBits_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.mostSigBits_ = mostSigBits_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this;
+ if (other.hasLeastSigBits()) {
+ setLeastSigBits(other.getLeastSigBits());
+ }
+ if (other.hasMostSigBits()) {
+ setMostSigBits(other.getMostSigBits());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasLeastSigBits()) {
+ // required field "leastSigBits" is not set
+ return false;
+ }
+ if (!hasMostSigBits()) {
+ // required field "mostSigBits" is not set
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ leastSigBits_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ mostSigBits_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required uint64 leastSigBits = 1;
+ private long leastSigBits_ ;
+ public boolean hasLeastSigBits() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getLeastSigBits() {
+ return leastSigBits_;
+ }
+ public Builder setLeastSigBits(long value) {
+ bitField0_ |= 0x00000001;
+ leastSigBits_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearLeastSigBits() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ leastSigBits_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint64 mostSigBits = 2;
+ private long mostSigBits_ ;
+ public boolean hasMostSigBits() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getMostSigBits() {
+ return mostSigBits_;
+ }
+ public Builder setMostSigBits(long value) {
+ bitField0_ |= 0x00000002;
+ mostSigBits_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMostSigBits() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ mostSigBits_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:UUID)
+ }
+
+ static {
+ defaultInstance = new UUID(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:UUID)
+ }
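+
+ // Editor's note (illustrative only, not part of the generated output): this
+ // message mirrors java.util.UUID, so a round-trip conversion looks like the
+ // following sketch (both required fields must be set before build()):
+ //
+ // java.util.UUID javaUuid = java.util.UUID.randomUUID();
+ // AdminProtos.UUID protoUuid = AdminProtos.UUID.newBuilder()
+ // .setMostSigBits(javaUuid.getMostSignificantBits())
+ // .setLeastSigBits(javaUuid.getLeastSignificantBits())
+ // .build();
+ // java.util.UUID back = new java.util.UUID(
+ // protoUuid.getMostSigBits(), protoUuid.getLeastSigBits());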
+
+ public interface WALEntryOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .WALEntry.WALKey key = 1;
+ boolean hasKey();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder();
+
+ // required .WALEntry.WALEdit edit = 2;
+ boolean hasEdit();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder();
+ }
+ public static final class WALEntry extends
+ com.google.protobuf.GeneratedMessage
+ implements WALEntryOrBuilder {
+ // Use WALEntry.newBuilder() to construct.
+ private WALEntry(Builder builder) {
+ super(builder);
+ }
+ private WALEntry(boolean noInit) {}
+
+ private static final WALEntry defaultInstance;
+ public static WALEntry getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public WALEntry getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable;
+ }
+
+ public interface WALKeyOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes encodedRegionName = 1;
+ boolean hasEncodedRegionName();
+ com.google.protobuf.ByteString getEncodedRegionName();
+
+ // required bytes tableName = 2;
+ boolean hasTableName();
+ com.google.protobuf.ByteString getTableName();
+
+ // required uint64 logSequenceNumber = 3;
+ boolean hasLogSequenceNumber();
+ long getLogSequenceNumber();
+
+ // required uint64 writeTime = 4;
+ boolean hasWriteTime();
+ long getWriteTime();
+
+ // optional .UUID clusterId = 5;
+ boolean hasClusterId();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder();
+ }
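+
+ // Editor's note (illustrative only, not part of the generated output): a
+ // WALKey pairs a region and table with a log sequence number and write
+ // time; clusterId is optional and only checked for completeness when
+ // present. A hypothetical construction sketch, where regionName, tableName,
+ // and seqId are stand-ins:
+ //
+ // WALEntry.WALKey key = WALEntry.WALKey.newBuilder()
+ // .setEncodedRegionName(com.google.protobuf.ByteString.copyFromUtf8(regionName))
+ // .setTableName(com.google.protobuf.ByteString.copyFromUtf8(tableName))
+ // .setLogSequenceNumber(seqId)
+ // .setWriteTime(System.currentTimeMillis())
+ // .build();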
+ public static final class WALKey extends
+ com.google.protobuf.GeneratedMessage
+ implements WALKeyOrBuilder {
+ // Use WALKey.newBuilder() to construct.
+ private WALKey(Builder builder) {
+ super(builder);
+ }
+ private WALKey(boolean noInit) {}
+
+ private static final WALKey defaultInstance;
+ public static WALKey getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public WALKey getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bytes encodedRegionName = 1;
+ public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString encodedRegionName_;
+ public boolean hasEncodedRegionName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getEncodedRegionName() {
+ return encodedRegionName_;
+ }
+
+ // required bytes tableName = 2;
+ public static final int TABLENAME_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString tableName_;
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getTableName() {
+ return tableName_;
+ }
+
+ // required uint64 logSequenceNumber = 3;
+ public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3;
+ private long logSequenceNumber_;
+ public boolean hasLogSequenceNumber() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getLogSequenceNumber() {
+ return logSequenceNumber_;
+ }
+
+ // required uint64 writeTime = 4;
+ public static final int WRITETIME_FIELD_NUMBER = 4;
+ private long writeTime_;
+ public boolean hasWriteTime() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getWriteTime() {
+ return writeTime_;
+ }
+
+ // optional .UUID clusterId = 5;
+ public static final int CLUSTERID_FIELD_NUMBER = 5;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_;
+ public boolean hasClusterId() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() {
+ return clusterId_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() {
+ return clusterId_;
+ }
+
+ private void initFields() {
+ encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+ tableName_ = com.google.protobuf.ByteString.EMPTY;
+ logSequenceNumber_ = 0L;
+ writeTime_ = 0L;
+ clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasEncodedRegionName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasLogSequenceNumber()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasWriteTime()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasClusterId()) {
+ if (!getClusterId().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, encodedRegionName_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, logSequenceNumber_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt64(4, writeTime_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeMessage(5, clusterId_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, encodedRegionName_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, logSequenceNumber_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(4, writeTime_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(5, clusterId_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj;
+
+ boolean result = true;
+ result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
+ if (hasEncodedRegionName()) {
+ result = result && getEncodedRegionName()
+ .equals(other.getEncodedRegionName());
+ }
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
+ if (hasLogSequenceNumber()) {
+ result = result && (getLogSequenceNumber()
+ == other.getLogSequenceNumber());
+ }
+ result = result && (hasWriteTime() == other.hasWriteTime());
+ if (hasWriteTime()) {
+ result = result && (getWriteTime()
+ == other.getWriteTime());
+ }
+ result = result && (hasClusterId() == other.hasClusterId());
+ if (hasClusterId()) {
+ result = result && getClusterId()
+ .equals(other.getClusterId());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasEncodedRegionName()) {
+ hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getEncodedRegionName().hashCode();
+ }
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLENAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (hasLogSequenceNumber()) {
+ hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLogSequenceNumber());
+ }
+ if (hasWriteTime()) {
+ hash = (37 * hash) + WRITETIME_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getWriteTime());
+ }
+ if (hasClusterId()) {
+ hash = (37 * hash) + CLUSTERID_FIELD_NUMBER;
+ hash = (53 * hash) + getClusterId().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getClusterIdFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ tableName_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ logSequenceNumber_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ writeTime_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ if (clusterIdBuilder_ == null) {
+ clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance();
+ } else {
+ clusterIdBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000010);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.encodedRegionName_ = encodedRegionName_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.tableName_ = tableName_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.logSequenceNumber_ = logSequenceNumber_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.writeTime_ = writeTime_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ if (clusterIdBuilder_ == null) {
+ result.clusterId_ = clusterId_;
+ } else {
+ result.clusterId_ = clusterIdBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this;
+ if (other.hasEncodedRegionName()) {
+ setEncodedRegionName(other.getEncodedRegionName());
+ }
+ if (other.hasTableName()) {
+ setTableName(other.getTableName());
+ }
+ if (other.hasLogSequenceNumber()) {
+ setLogSequenceNumber(other.getLogSequenceNumber());
+ }
+ if (other.hasWriteTime()) {
+ setWriteTime(other.getWriteTime());
+ }
+ if (other.hasClusterId()) {
+ mergeClusterId(other.getClusterId());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ // all four required WALKey fields must be set
+ if (!hasEncodedRegionName()) {
+ return false;
+ }
+ if (!hasTableName()) {
+ return false;
+ }
+ if (!hasLogSequenceNumber()) {
+ return false;
+ }
+ if (!hasWriteTime()) {
+ return false;
+ }
+ // clusterId is optional, but if present it must itself be initialized
+ if (hasClusterId()) {
+ if (!getClusterId().isInitialized()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ encodedRegionName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ tableName_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ logSequenceNumber_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ writeTime_ = input.readUInt64();
+ break;
+ }
+ case 42: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder();
+ if (hasClusterId()) {
+ subBuilder.mergeFrom(getClusterId());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setClusterId(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
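+ // Note: bitField0_ tracks field presence with one bit per field in
+ // declaration order, 0x01 for encodedRegionName through 0x10 for
+ // clusterId; the has*() accessors and isInitialized() read these bits.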
+ private int bitField0_;
+
+ // required bytes encodedRegionName = 1;
+ private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasEncodedRegionName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getEncodedRegionName() {
+ return encodedRegionName_;
+ }
+ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ encodedRegionName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearEncodedRegionName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
+ onChanged();
+ return this;
+ }
+
+ // required bytes tableName = 2;
+ private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getTableName() {
+ return tableName_;
+ }
+ public Builder setTableName(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ tableName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearTableName() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ tableName_ = getDefaultInstance().getTableName();
+ onChanged();
+ return this;
+ }
+
+ // required uint64 logSequenceNumber = 3;
+ private long logSequenceNumber_ ;
+ public boolean hasLogSequenceNumber() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getLogSequenceNumber() {
+ return logSequenceNumber_;
+ }
+ public Builder setLogSequenceNumber(long value) {
+ bitField0_ |= 0x00000004;
+ logSequenceNumber_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearLogSequenceNumber() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ logSequenceNumber_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint64 writeTime = 4;
+ private long writeTime_ ;
+ public boolean hasWriteTime() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getWriteTime() {
+ return writeTime_;
+ }
+ public Builder setWriteTime(long value) {
+ bitField0_ |= 0x00000008;
+ writeTime_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearWriteTime() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ writeTime_ = 0L;
+ onChanged();
+ return this;
+ }
+
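+ // Note: the message-typed clusterId field switches to a SingleFieldBuilder
+ // the first time getClusterIdBuilder() is called; until then the plain
+ // clusterId_ instance is used, so read-only paths allocate no builder.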
+ // optional .UUID clusterId = 5;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_;
+ public boolean hasClusterId() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() {
+ if (clusterIdBuilder_ == null) {
+ return clusterId_;
+ } else {
+ return clusterIdBuilder_.getMessage();
+ }
+ }
+ public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) {
+ if (clusterIdBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ clusterId_ = value;
+ onChanged();
+ } else {
+ clusterIdBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000010;
+ return this;
+ }
+ public Builder setClusterId(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) {
+ if (clusterIdBuilder_ == null) {
+ clusterId_ = builderForValue.build();
+ onChanged();
+ } else {
+ clusterIdBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000010;
+ return this;
+ }
+ public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) {
+ if (clusterIdBuilder_ == null) {
+ if (((bitField0_ & 0x00000010) == 0x00000010) &&
+ clusterId_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) {
+ clusterId_ =
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial();
+ } else {
+ clusterId_ = value;
+ }
+ onChanged();
+ } else {
+ clusterIdBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000010;
+ return this;
+ }
+ public Builder clearClusterId() {
+ if (clusterIdBuilder_ == null) {
+ clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance();
+ onChanged();
+ } else {
+ clusterIdBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000010);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() {
+ bitField0_ |= 0x00000010;
+ onChanged();
+ return getClusterIdFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() {
+ if (clusterIdBuilder_ != null) {
+ return clusterIdBuilder_.getMessageOrBuilder();
+ } else {
+ return clusterId_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>
+ getClusterIdFieldBuilder() {
+ if (clusterIdBuilder_ == null) {
+ clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>(
+ clusterId_,
+ getParentForChildren(),
+ isClean());
+ clusterId_ = null;
+ }
+ return clusterIdBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:WALEntry.WALKey)
+ }
+
+ static {
+ defaultInstance = new WALKey(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:WALEntry.WALKey)
+ }
+
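+ // Note: a WALEdit appears to carry the payload of one WAL entry, namely
+ // the serialized KeyValue cells plus the replication scope of each column
+ // family the edit touches.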
+ public interface WALEditOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated bytes keyValueBytes = 1;
+ java.util.List<com.google.protobuf.ByteString> getKeyValueBytesList();
+ int getKeyValueBytesCount();
+ com.google.protobuf.ByteString getKeyValueBytes(int index);
+
+ // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope>
+ getFamilyScopeList();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index);
+ int getFamilyScopeCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>
+ getFamilyScopeOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder(
+ int index);
+ }
+ public static final class WALEdit extends
+ com.google.protobuf.GeneratedMessage
+ implements WALEditOrBuilder {
+ // Use WALEdit.newBuilder() to construct.
+ private WALEdit(Builder builder) {
+ super(builder);
+ }
+ private WALEdit(boolean noInit) {}
+
+ private static final WALEdit defaultInstance;
+ public static WALEdit getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public WALEdit getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable;
+ }
+
+ public enum ScopeType
+ implements com.google.protobuf.ProtocolMessageEnum {
+ REPLICATION_SCOPE_LOCAL(0, 0),
+ REPLICATION_SCOPE_GLOBAL(1, 1),
+ ;
+
+ public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0;
+ public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1;
+
+
+ public final int getNumber() { return value; }
+
+ public static ScopeType valueOf(int value) {
+ switch (value) {
+ case 0: return REPLICATION_SCOPE_LOCAL;
+ case 1: return REPLICATION_SCOPE_GLOBAL;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
+ public ScopeType findValueByNumber(int number) {
+ return ScopeType.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final ScopeType[] VALUES = {
+ REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL,
+ };
+
+ public static ScopeType valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private ScopeType(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType)
+ }
+
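+ // Note: FamilyScope pairs a column family name with its ScopeType, that
+ // is, whether edits to the family stay local or are replicated globally.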
+ public interface FamilyScopeOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes family = 1;
+ boolean hasFamily();
+ com.google.protobuf.ByteString getFamily();
+
+ // required .WALEntry.WALEdit.ScopeType scopeType = 2;
+ boolean hasScopeType();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType();
+ }
+ public static final class FamilyScope extends
+ com.google.protobuf.GeneratedMessage
+ implements FamilyScopeOrBuilder {
+ // Use FamilyScope.newBuilder() to construct.
+ private FamilyScope(Builder builder) {
+ super(builder);
+ }
+ private FamilyScope(boolean noInit) {}
+
+ private static final FamilyScope defaultInstance;
+ public static FamilyScope getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FamilyScope getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bytes family = 1;
+ public static final int FAMILY_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString family_;
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+
+ // required .WALEntry.WALEdit.ScopeType scopeType = 2;
+ public static final int SCOPETYPE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_;
+ public boolean hasScopeType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() {
+ return scopeType_;
+ }
+
+ private void initFields() {
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasFamily()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasScopeType()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeEnum(2, scopeType_.getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(2, scopeType_.getNumber());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj;
+
+ boolean result = true;
+ result = result && (hasFamily() == other.hasFamily());
+ if (hasFamily()) {
+ result = result && getFamily()
+ .equals(other.getFamily());
+ }
+ result = result && (hasScopeType() == other.hasScopeType());
+ if (hasScopeType()) {
+ result = result &&
+ (getScopeType() == other.getScopeType());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasFamily()) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamily().hashCode();
+ }
+ if (hasScopeType()) {
+ hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getScopeType());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.family_ = family_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.scopeType_ = scopeType_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this;
+ if (other.hasFamily()) {
+ setFamily(other.getFamily());
+ }
+ if (other.hasScopeType()) {
+ setScopeType(other.getScopeType());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasFamily()) {
+
+ return false;
+ }
+ if (!hasScopeType()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ family_ = input.readBytes();
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ bitField0_ |= 0x00000002;
+ scopeType_ = value;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required bytes family = 1;
+ private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+ public Builder setFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ family_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearFamily() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ family_ = getDefaultInstance().getFamily();
+ onChanged();
+ return this;
+ }
+
+ // required .WALEntry.WALEdit.ScopeType scopeType = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL;
+ public boolean hasScopeType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() {
+ return scopeType_;
+ }
+ public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ scopeType_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearScopeType() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope)
+ }
+
+ static {
+ defaultInstance = new FamilyScope(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope)
+ }
+
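+ // Note: keyValueBytes holds each cell in serialized KeyValue form;
+ // decoding is presumably left to HBase WAL code rather than modeled as a
+ // nested protobuf message.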
+ // repeated bytes keyValueBytes = 1;
+ public static final int KEYVALUEBYTES_FIELD_NUMBER = 1;
+ private java.util.List<com.google.protobuf.ByteString> keyValueBytes_;
+ public java.util.List<com.google.protobuf.ByteString>
+ getKeyValueBytesList() {
+ return keyValueBytes_;
+ }
+ public int getKeyValueBytesCount() {
+ return keyValueBytes_.size();
+ }
+ public com.google.protobuf.ByteString getKeyValueBytes(int index) {
+ return keyValueBytes_.get(index);
+ }
+
+ // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2;
+ public static final int FAMILYSCOPE_FIELD_NUMBER = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> familyScope_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> getFamilyScopeList() {
+ return familyScope_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>
+ getFamilyScopeOrBuilderList() {
+ return familyScope_;
+ }
+ public int getFamilyScopeCount() {
+ return familyScope_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) {
+ return familyScope_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder(
+ int index) {
+ return familyScope_.get(index);
+ }
+
+ private void initFields() {
+ keyValueBytes_ = java.util.Collections.emptyList();
+ familyScope_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getFamilyScopeCount(); i++) {
+ if (!getFamilyScope(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < keyValueBytes_.size(); i++) {
+ output.writeBytes(1, keyValueBytes_.get(i));
+ }
+ for (int i = 0; i < familyScope_.size(); i++) {
+ output.writeMessage(2, familyScope_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < keyValueBytes_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(keyValueBytes_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getKeyValueBytesList().size();
+ }
+ for (int i = 0; i < familyScope_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, familyScope_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj;
+
+ boolean result = true;
+ result = result && getKeyValueBytesList()
+ .equals(other.getKeyValueBytesList());
+ result = result && getFamilyScopeList()
+ .equals(other.getFamilyScopeList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getKeyValueBytesCount() > 0) {
+ hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER;
+ hash = (53 * hash) + getKeyValueBytesList().hashCode();
+ }
+ if (getFamilyScopeCount() > 0) {
+ hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER;
+ hash = (53 * hash) + getFamilyScopeList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getFamilyScopeFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ keyValueBytes_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (familyScopeBuilder_ == null) {
+ familyScope_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ familyScopeBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.keyValueBytes_ = keyValueBytes_;
+ if (familyScopeBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ familyScope_ = java.util.Collections.unmodifiableList(familyScope_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.familyScope_ = familyScope_;
+ } else {
+ result.familyScope_ = familyScopeBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this;
+ if (!other.keyValueBytes_.isEmpty()) {
+ if (keyValueBytes_.isEmpty()) {
+ keyValueBytes_ = other.keyValueBytes_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureKeyValueBytesIsMutable();
+ keyValueBytes_.addAll(other.keyValueBytes_);
+ }
+ onChanged();
+ }
+ if (familyScopeBuilder_ == null) {
+ if (!other.familyScope_.isEmpty()) {
+ if (familyScope_.isEmpty()) {
+ familyScope_ = other.familyScope_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureFamilyScopeIsMutable();
+ familyScope_.addAll(other.familyScope_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.familyScope_.isEmpty()) {
+ if (familyScopeBuilder_.isEmpty()) {
+ familyScopeBuilder_.dispose();
+ familyScopeBuilder_ = null;
+ familyScope_ = other.familyScope_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ familyScopeBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getFamilyScopeFieldBuilder() : null;
+ } else {
+ familyScopeBuilder_.addAllMessages(other.familyScope_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getFamilyScopeCount(); i++) {
+ if (!getFamilyScope(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ ensureKeyValueBytesIsMutable();
+ keyValueBytes_.add(input.readBytes());
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addFamilyScope(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated bytes keyValueBytes = 1;
+ private java.util.List<com.google.protobuf.ByteString> keyValueBytes_ = java.util.Collections.emptyList();
+ private void ensureKeyValueBytesIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ keyValueBytes_ = new java.util.ArrayList<com.google.protobuf.ByteString>(keyValueBytes_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<com.google.protobuf.ByteString>
+ getKeyValueBytesList() {
+ return java.util.Collections.unmodifiableList(keyValueBytes_);
+ }
+ public int getKeyValueBytesCount() {
+ return keyValueBytes_.size();
+ }
+ public com.google.protobuf.ByteString getKeyValueBytes(int index) {
+ return keyValueBytes_.get(index);
+ }
+ public Builder setKeyValueBytes(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureKeyValueBytesIsMutable();
+ keyValueBytes_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addKeyValueBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureKeyValueBytesIsMutable();
+ keyValueBytes_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllKeyValueBytes(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureKeyValueBytesIsMutable();
+ super.addAll(values, keyValueBytes_);
+ onChanged();
+ return this;
+ }
+ public Builder clearKeyValueBytes() {
+ keyValueBytes_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
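+ // Note: the repeated familyScope field keeps a plain list until
+ // getFamilyScopeFieldBuilder() is first called, after which a
+ // RepeatedFieldBuilder owns the elements and familyScope_ is nulled out;
+ // ensureFamilyScopeIsMutable() copies the list on first modification.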
+ // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> familyScope_ =
+ java.util.Collections.emptyList();
+ private void ensureFamilyScopeIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ familyScope_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope>(familyScope_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_;
+
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> getFamilyScopeList() {
+ if (familyScopeBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(familyScope_);
+ } else {
+ return familyScopeBuilder_.getMessageList();
+ }
+ }
+ public int getFamilyScopeCount() {
+ if (familyScopeBuilder_ == null) {
+ return familyScope_.size();
+ } else {
+ return familyScopeBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) {
+ if (familyScopeBuilder_ == null) {
+ return familyScope_.get(index);
+ } else {
+ return familyScopeBuilder_.getMessage(index);
+ }
+ }
+ public Builder setFamilyScope(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) {
+ if (familyScopeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyScopeIsMutable();
+ familyScope_.set(index, value);
+ onChanged();
+ } else {
+ familyScopeBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setFamilyScope(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) {
+ if (familyScopeBuilder_ == null) {
+ ensureFamilyScopeIsMutable();
+ familyScope_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ familyScopeBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) {
+ if (familyScopeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyScopeIsMutable();
+ familyScope_.add(value);
+ onChanged();
+ } else {
+ familyScopeBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addFamilyScope(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) {
+ if (familyScopeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyScopeIsMutable();
+ familyScope_.add(index, value);
+ onChanged();
+ } else {
+ familyScopeBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addFamilyScope(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) {
+ if (familyScopeBuilder_ == null) {
+ ensureFamilyScopeIsMutable();
+ familyScope_.add(builderForValue.build());
+ onChanged();
+ } else {
+ familyScopeBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addFamilyScope(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) {
+ if (familyScopeBuilder_ == null) {
+ ensureFamilyScopeIsMutable();
+ familyScope_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ familyScopeBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllFamilyScope(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope> values) {
+ if (familyScopeBuilder_ == null) {
+ ensureFamilyScopeIsMutable();
+ super.addAll(values, familyScope_);
+ onChanged();
+ } else {
+ familyScopeBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearFamilyScope() {
+ if (familyScopeBuilder_ == null) {
+ familyScope_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ } else {
+ familyScopeBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removeFamilyScope(int index) {
+ if (familyScopeBuilder_ == null) {
+ ensureFamilyScopeIsMutable();
+ familyScope_.remove(index);
+ onChanged();
+ } else {
+ familyScopeBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder(
+ int index) {
+ return getFamilyScopeFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder(
+ int index) {
+ if (familyScopeBuilder_ == null) {
+ return familyScope_.get(index); } else {
+ return familyScopeBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>
+ getFamilyScopeOrBuilderList() {
+ if (familyScopeBuilder_ != null) {
+ return familyScopeBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(familyScope_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() {
+ return getFamilyScopeFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder(
+ int index) {
+ return getFamilyScopeFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance());
+ }
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder>
+ getFamilyScopeBuilderList() {
+ return getFamilyScopeFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>
+ getFamilyScopeFieldBuilder() {
+ if (familyScopeBuilder_ == null) {
+ familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>(
+ familyScope_,
+ ((bitField0_ & 0x00000002) == 0x00000002),
+ getParentForChildren(),
+ isClean());
+ familyScope_ = null;
+ }
+ return familyScopeBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit)
+ }
+
+ static {
+ defaultInstance = new WALEdit(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:WALEntry.WALEdit)
+ }
+
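+ // Note: a WALEntry pairs a required WALKey (region, table, sequence
+ // number, write time, optional cluster id) with a required WALEdit
+ // payload; isInitialized() enforces both.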
+ private int bitField0_;
+ // required .WALEntry.WALKey key = 1;
+ public static final int KEY_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_;
+ public boolean hasKey() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() {
+ return key_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() {
+ return key_;
+ }
+
+ // required .WALEntry.WALEdit edit = 2;
+ public static final int EDIT_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_;
+ public boolean hasEdit() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() {
+ return edit_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() {
+ return edit_;
+ }
+
+ private void initFields() {
+ key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance();
+ edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasKey()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasEdit()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getKey().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getEdit().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, key_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, edit_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, key_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, edit_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj;
+
+ boolean result = true;
+ result = result && (hasKey() == other.hasKey());
+ if (hasKey()) {
+ result = result && getKey()
+ .equals(other.getKey());
+ }
+ result = result && (hasEdit() == other.hasEdit());
+ if (hasEdit()) {
+ result = result && getEdit()
+ .equals(other.getEdit());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasKey()) {
+ hash = (37 * hash) + KEY_FIELD_NUMBER;
+ hash = (53 * hash) + getKey().hashCode();
+ }
+ if (hasEdit()) {
+ hash = (37 * hash) + EDIT_FIELD_NUMBER;
+ hash = (53 * hash) + getEdit().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getKeyFieldBuilder();
+ getEditFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (keyBuilder_ == null) {
+ key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance();
+ } else {
+ keyBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (editBuilder_ == null) {
+ edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance();
+ } else {
+ editBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (keyBuilder_ == null) {
+ result.key_ = key_;
+ } else {
+ result.key_ = keyBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (editBuilder_ == null) {
+ result.edit_ = edit_;
+ } else {
+ result.edit_ = editBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this;
+ if (other.hasKey()) {
+ mergeKey(other.getKey());
+ }
+ if (other.hasEdit()) {
+ mergeEdit(other.getEdit());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasKey()) {
+
+ return false;
+ }
+ if (!hasEdit()) {
+
+ return false;
+ }
+ if (!getKey().isInitialized()) {
+
+ return false;
+ }
+ if (!getEdit().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder();
+ if (hasKey()) {
+ subBuilder.mergeFrom(getKey());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setKey(subBuilder.buildPartial());
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder();
+ if (hasEdit()) {
+ subBuilder.mergeFrom(getEdit());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setEdit(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
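+
+      // The cases above dispatch on the wire tag, (field_number << 3) | wire_type:
+      // case 10 is field 1 (key) and case 18 is field 2 (edit), both length-delimited
+      // (wire type 2); tag 0 signals the end of this message's bytes.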
+
+ private int bitField0_;
+
+ // required .WALEntry.WALKey key = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> keyBuilder_;
+ public boolean hasKey() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() {
+ if (keyBuilder_ == null) {
+ return key_;
+ } else {
+ return keyBuilder_.getMessage();
+ }
+ }
+ public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) {
+ if (keyBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ key_ = value;
+ onChanged();
+ } else {
+ keyBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setKey(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) {
+ if (keyBuilder_ == null) {
+ key_ = builderForValue.build();
+ onChanged();
+ } else {
+ keyBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) {
+ if (keyBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ key_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) {
+ key_ =
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(key_).mergeFrom(value).buildPartial();
+ } else {
+ key_ = value;
+ }
+ onChanged();
+ } else {
+ keyBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearKey() {
+ if (keyBuilder_ == null) {
+ key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance();
+ onChanged();
+ } else {
+ keyBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getKeyBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getKeyFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() {
+ if (keyBuilder_ != null) {
+ return keyBuilder_.getMessageOrBuilder();
+ } else {
+ return key_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>
+ getKeyFieldBuilder() {
+ if (keyBuilder_ == null) {
+ keyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>(
+ key_,
+ getParentForChildren(),
+ isClean());
+ key_ = null;
+ }
+ return keyBuilder_;
+ }
+
+ // required .WALEntry.WALEdit edit = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_;
+ public boolean hasEdit() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() {
+ if (editBuilder_ == null) {
+ return edit_;
+ } else {
+ return editBuilder_.getMessage();
+ }
+ }
+ public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) {
+ if (editBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ edit_ = value;
+ onChanged();
+ } else {
+ editBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder setEdit(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) {
+ if (editBuilder_ == null) {
+ edit_ = builderForValue.build();
+ onChanged();
+ } else {
+ editBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) {
+ if (editBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ edit_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) {
+ edit_ =
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial();
+ } else {
+ edit_ = value;
+ }
+ onChanged();
+ } else {
+ editBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder clearEdit() {
+ if (editBuilder_ == null) {
+ edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance();
+ onChanged();
+ } else {
+ editBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getEditFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() {
+ if (editBuilder_ != null) {
+ return editBuilder_.getMessageOrBuilder();
+ } else {
+ return edit_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>
+ getEditFieldBuilder() {
+ if (editBuilder_ == null) {
+ editBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>(
+ edit_,
+ getParentForChildren(),
+ isClean());
+ edit_ = null;
+ }
+ return editBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:WALEntry)
+ }
+
+ static {
+ defaultInstance = new WALEntry(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:WALEntry)
+ }
+
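+  // A minimal usage sketch, assuming `walKey` and `walEdit` are fully built
+  // WALEntry.WALKey / WALEntry.WALEdit messages prepared by the caller; build()
+  // throws if either required field is unset, mirroring isInitialized() above.
+  //
+  //   AdminProtos.WALEntry entry = AdminProtos.WALEntry.newBuilder()
+  //       .setKey(walKey)
+  //       .setEdit(walEdit)
+  //       .build();
+  //   byte[] wire = entry.toByteArray();
+  //   AdminProtos.WALEntry copy = AdminProtos.WALEntry.parseFrom(wire);
+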
+ public interface ReplicateWALEntryRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .WALEntry entry = 1;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry>
+ getEntryList();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index);
+ int getEntryCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>
+ getEntryOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
+ int index);
+ }
+ public static final class ReplicateWALEntryRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ReplicateWALEntryRequestOrBuilder {
+ // Use ReplicateWALEntryRequest.newBuilder() to construct.
+ private ReplicateWALEntryRequest(Builder builder) {
+ super(builder);
+ }
+ private ReplicateWALEntryRequest(boolean noInit) {}
+
+ private static final ReplicateWALEntryRequest defaultInstance;
+ public static ReplicateWALEntryRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ReplicateWALEntryRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable;
+ }
+
+ // repeated .WALEntry entry = 1;
+ public static final int ENTRY_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> entry_;
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> getEntryList() {
+ return entry_;
+ }
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>
+ getEntryOrBuilderList() {
+ return entry_;
+ }
+ public int getEntryCount() {
+ return entry_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) {
+ return entry_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
+ int index) {
+ return entry_.get(index);
+ }
+
+ private void initFields() {
+ entry_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getEntryCount(); i++) {
+ if (!getEntry(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < entry_.size(); i++) {
+ output.writeMessage(1, entry_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < entry_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, entry_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj;
+
+ boolean result = true;
+ result = result && getEntryList()
+ .equals(other.getEntryList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getEntryCount() > 0) {
+ hash = (37 * hash) + ENTRY_FIELD_NUMBER;
+ hash = (53 * hash) + getEntryList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getEntryFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (entryBuilder_ == null) {
+ entry_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ entryBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this);
+ int from_bitField0_ = bitField0_;
+ if (entryBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ entry_ = java.util.Collections.unmodifiableList(entry_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.entry_ = entry_;
+ } else {
+ result.entry_ = entryBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this;
+ if (entryBuilder_ == null) {
+ if (!other.entry_.isEmpty()) {
+ if (entry_.isEmpty()) {
+ entry_ = other.entry_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureEntryIsMutable();
+ entry_.addAll(other.entry_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.entry_.isEmpty()) {
+ if (entryBuilder_.isEmpty()) {
+ entryBuilder_.dispose();
+ entryBuilder_ = null;
+ entry_ = other.entry_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ entryBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getEntryFieldBuilder() : null;
+ } else {
+ entryBuilder_.addAllMessages(other.entry_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getEntryCount(); i++) {
+ if (!getEntry(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder();
+ input.readMessage(subBuilder, extensionRegistry);
+ addEntry(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated .WALEntry entry = 1;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> entry_ =
+ java.util.Collections.emptyList();
+ private void ensureEntryIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          entry_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry>(entry_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> entryBuilder_;
+
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> getEntryList() {
+ if (entryBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(entry_);
+ } else {
+ return entryBuilder_.getMessageList();
+ }
+ }
+ public int getEntryCount() {
+ if (entryBuilder_ == null) {
+ return entry_.size();
+ } else {
+ return entryBuilder_.getCount();
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getEntry(int index) {
+ if (entryBuilder_ == null) {
+ return entry_.get(index);
+ } else {
+ return entryBuilder_.getMessage(index);
+ }
+ }
+ public Builder setEntry(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) {
+ if (entryBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureEntryIsMutable();
+ entry_.set(index, value);
+ onChanged();
+ } else {
+ entryBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ public Builder setEntry(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) {
+ if (entryBuilder_ == null) {
+ ensureEntryIsMutable();
+ entry_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ entryBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) {
+ if (entryBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureEntryIsMutable();
+ entry_.add(value);
+ onChanged();
+ } else {
+ entryBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ public Builder addEntry(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) {
+ if (entryBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureEntryIsMutable();
+ entry_.add(index, value);
+ onChanged();
+ } else {
+ entryBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ public Builder addEntry(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) {
+ if (entryBuilder_ == null) {
+ ensureEntryIsMutable();
+ entry_.add(builderForValue.build());
+ onChanged();
+ } else {
+ entryBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addEntry(
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) {
+ if (entryBuilder_ == null) {
+ ensureEntryIsMutable();
+ entry_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ entryBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ public Builder addAllEntry(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> values) {
+ if (entryBuilder_ == null) {
+ ensureEntryIsMutable();
+ super.addAll(values, entry_);
+ onChanged();
+ } else {
+ entryBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ public Builder clearEntry() {
+ if (entryBuilder_ == null) {
+ entry_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ entryBuilder_.clear();
+ }
+ return this;
+ }
+ public Builder removeEntry(int index) {
+ if (entryBuilder_ == null) {
+ ensureEntryIsMutable();
+ entry_.remove(index);
+ onChanged();
+ } else {
+ entryBuilder_.remove(index);
+ }
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getEntryBuilder(
+ int index) {
+ return getEntryFieldBuilder().getBuilder(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder(
+ int index) {
+ if (entryBuilder_ == null) {
+ return entry_.get(index); } else {
+ return entryBuilder_.getMessageOrBuilder(index);
+ }
+ }
+      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>
+ getEntryOrBuilderList() {
+ if (entryBuilder_ != null) {
+ return entryBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(entry_);
+ }
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder() {
+ return getEntryFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance());
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addEntryBuilder(
+ int index) {
+ return getEntryFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance());
+ }
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder>
+ getEntryBuilderList() {
+ return getEntryFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>
+ getEntryFieldBuilder() {
+ if (entryBuilder_ == null) {
+ entryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>(
+ entry_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ entry_ = null;
+ }
+ return entryBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest)
+ }
+
+ static {
+ defaultInstance = new ReplicateWALEntryRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest)
+ }
+
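+  // A minimal usage sketch, assuming `entries` is a caller-supplied
+  // java.util.List<AdminProtos.WALEntry>; each entry must itself be initialized
+  // (key and edit set) or build() throws:
+  //
+  //   AdminProtos.ReplicateWALEntryRequest request =
+  //       AdminProtos.ReplicateWALEntryRequest.newBuilder()
+  //           .addAllEntry(entries)
+  //           .build();
+  //   int n = request.getEntryCount();
+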
+ public interface ReplicateWALEntryResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class ReplicateWALEntryResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ReplicateWALEntryResponseOrBuilder {
+ // Use ReplicateWALEntryResponse.newBuilder() to construct.
+ private ReplicateWALEntryResponse(Builder builder) {
+ super(builder);
+ }
+ private ReplicateWALEntryResponse(boolean noInit) {}
+
+ private static final ReplicateWALEntryResponse defaultInstance;
+ public static ReplicateWALEntryResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ReplicateWALEntryResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse)
+ }
+
+ static {
+ defaultInstance = new ReplicateWALEntryResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ReplicateWALEntryResponse)
+ }
+
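+  // A minimal sketch of the delimited helpers, assuming `out` and `in` are
+  // caller-supplied java.io streams: writeDelimitedTo prefixes each message with a
+  // varint length, and parseDelimitedFrom returns null at a clean end of stream:
+  //
+  //   response.writeDelimitedTo(out);
+  //   AdminProtos.ReplicateWALEntryResponse next =
+  //       AdminProtos.ReplicateWALEntryResponse.parseDelimitedFrom(in);
+  //   if (next == null) { /* no more messages */ }
+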
+ public interface RollWALWriterRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class RollWALWriterRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements RollWALWriterRequestOrBuilder {
+ // Use RollWALWriterRequest.newBuilder() to construct.
+ private RollWALWriterRequest(Builder builder) {
+ super(builder);
+ }
+ private RollWALWriterRequest(boolean noInit) {}
+
+ private static final RollWALWriterRequest defaultInstance;
+ public static RollWALWriterRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RollWALWriterRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:RollWALWriterRequest)
+ }
+
+ static {
+ defaultInstance = new RollWALWriterRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:RollWALWriterRequest)
+ }
+
+ public interface RollWALWriterResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated bytes regionToFlush = 1;
+    java.util.List<com.google.protobuf.ByteString> getRegionToFlushList();
+ int getRegionToFlushCount();
+ com.google.protobuf.ByteString getRegionToFlush(int index);
+ }
+ public static final class RollWALWriterResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements RollWALWriterResponseOrBuilder {
+ // Use RollWALWriterResponse.newBuilder() to construct.
+ private RollWALWriterResponse(Builder builder) {
+ super(builder);
+ }
+ private RollWALWriterResponse(boolean noInit) {}
+
+ private static final RollWALWriterResponse defaultInstance;
+ public static RollWALWriterResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RollWALWriterResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable;
+ }
+
+ // repeated bytes regionToFlush = 1;
+ public static final int REGIONTOFLUSH_FIELD_NUMBER = 1;
+    private java.util.List<com.google.protobuf.ByteString> regionToFlush_;
+    public java.util.List<com.google.protobuf.ByteString>
+ getRegionToFlushList() {
+ return regionToFlush_;
+ }
+ public int getRegionToFlushCount() {
+ return regionToFlush_.size();
+ }
+ public com.google.protobuf.ByteString getRegionToFlush(int index) {
+ return regionToFlush_.get(index);
+ }
+
+ private void initFields() {
+      regionToFlush_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < regionToFlush_.size(); i++) {
+ output.writeBytes(1, regionToFlush_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < regionToFlush_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(regionToFlush_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getRegionToFlushList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
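+
+    // Sizing note: each repeated bytes element above costs its varint-length-prefixed
+    // payload plus one tag byte (field 1, wire type 2 encodes as 0x0A), which is the
+    // `1 * getRegionToFlushList().size()` term.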
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj;
+
+ boolean result = true;
+ result = result && getRegionToFlushList()
+ .equals(other.getRegionToFlushList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getRegionToFlushCount() > 0) {
+ hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER;
+ hash = (53 * hash) + getRegionToFlushList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ regionToFlush_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.regionToFlush_ = regionToFlush_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this;
+ if (!other.regionToFlush_.isEmpty()) {
+ if (regionToFlush_.isEmpty()) {
+ regionToFlush_ = other.regionToFlush_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureRegionToFlushIsMutable();
+ regionToFlush_.addAll(other.regionToFlush_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ ensureRegionToFlushIsMutable();
+ regionToFlush_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ }
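+
+ // The parse loop above dispatches on the raw wire tag: tag 0 marks end of
+ // input, tag 10 (field 1, wire type 2) is one regionToFlush element, and any
+ // other tag is preserved through parseUnknownField for forward compatibility.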
+
+ private int bitField0_;
+
+ // repeated bytes regionToFlush = 1;
+ private java.util.List<com.google.protobuf.ByteString> regionToFlush_ = java.util.Collections.emptyList();
+ private void ensureRegionToFlushIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ regionToFlush_ = new java.util.ArrayList<com.google.protobuf.ByteString>(regionToFlush_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<com.google.protobuf.ByteString>
+ getRegionToFlushList() {
+ return java.util.Collections.unmodifiableList(regionToFlush_);
+ }
+ public int getRegionToFlushCount() {
+ return regionToFlush_.size();
+ }
+ public com.google.protobuf.ByteString getRegionToFlush(int index) {
+ return regionToFlush_.get(index);
+ }
+ public Builder setRegionToFlush(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionToFlushIsMutable();
+ regionToFlush_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addRegionToFlush(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionToFlushIsMutable();
+ regionToFlush_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllRegionToFlush(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureRegionToFlushIsMutable();
+ super.addAll(values, regionToFlush_);
+ onChanged();
+ return this;
+ }
+ public Builder clearRegionToFlush() {
+ regionToFlush_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:RollWALWriterResponse)
+ }
+
+ static {
+ defaultInstance = new RollWALWriterResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:RollWALWriterResponse)
+ }
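+
+ // Illustrative usage sketch (not part of the protoc output; the region name
+ // below is a made-up placeholder):
+ //
+ //   RollWALWriterResponse resp = RollWALWriterResponse.newBuilder()
+ //       .addRegionToFlush(com.google.protobuf.ByteString.copyFromUtf8("r1"))
+ //       .build();
+ //   byte[] wire = resp.toByteArray();
+ //   RollWALWriterResponse parsed = RollWALWriterResponse.parseFrom(wire);
+ //   assert parsed.getRegionToFlushCount() == 1;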
+
+ public interface StopServerRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string reason = 1;
+ boolean hasReason();
+ String getReason();
+ }
+ public static final class StopServerRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements StopServerRequestOrBuilder {
+ // Use StopServerRequest.newBuilder() to construct.
+ private StopServerRequest(Builder builder) {
+ super(builder);
+ }
+ private StopServerRequest(boolean noInit) {}
+
+ private static final StopServerRequest defaultInstance;
+ public static StopServerRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public StopServerRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string reason = 1;
+ public static final int REASON_FIELD_NUMBER = 1;
+ private java.lang.Object reason_;
+ public boolean hasReason() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getReason() {
+ java.lang.Object ref = reason_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ reason_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getReasonBytes() {
+ java.lang.Object ref = reason_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ reason_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ reason_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasReason()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getReasonBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getReasonBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj;
+
+ boolean result = true;
+ result = result && (hasReason() == other.hasReason());
+ if (hasReason()) {
+ result = result && getReason()
+ .equals(other.getReason());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasReason()) {
+ hash = (37 * hash) + REASON_FIELD_NUMBER;
+ hash = (53 * hash) + getReason().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ reason_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.reason_ = reason_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this;
+ if (other.hasReason()) {
+ setReason(other.getReason());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasReason()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ reason_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string reason = 1;
+ private java.lang.Object reason_ = "";
+ public boolean hasReason() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getReason() {
+ java.lang.Object ref = reason_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ reason_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setReason(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ reason_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearReason() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ reason_ = getDefaultInstance().getReason();
+ onChanged();
+ return this;
+ }
+ void setReason(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ reason_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:StopServerRequest)
+ }
+
+ static {
+ defaultInstance = new StopServerRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:StopServerRequest)
+ }
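+
+ // Illustrative sketch (not part of the protoc output): "reason" is a
+ // required field, so build() throws an UninitializedMessageException if it
+ // was never set, while buildPartial() returns the incomplete message. The
+ // reason text below is a placeholder.
+ //
+ //   StopServerRequest req = StopServerRequest.newBuilder()
+ //       .setReason("rolling restart")
+ //       .build();
+ //   assert req.hasReason();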
+
+ public interface StopServerResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class StopServerResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements StopServerResponseOrBuilder {
+ // Use StopServerResponse.newBuilder() to construct.
+ private StopServerResponse(Builder builder) {
+ super(builder);
+ }
+ private StopServerResponse(boolean noInit) {}
+
+ private static final StopServerResponse defaultInstance;
+ public static StopServerResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public StopServerResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:StopServerResponse)
+ }
+
+ static {
+ defaultInstance = new StopServerResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:StopServerResponse)
+ }
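+
+ // StopServerResponse declares no fields: an instance serializes to zero
+ // bytes apart from any unknown fields carried through, giving the RPC a
+ // typed (if empty) reply.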
+
+ public interface GetServerInfoRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class GetServerInfoRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetServerInfoRequestOrBuilder {
+ // Use GetServerInfoRequest.newBuilder() to construct.
+ private GetServerInfoRequest(Builder builder) {
+ super(builder);
+ }
+ private GetServerInfoRequest(boolean noInit) {}
+
+ private static final GetServerInfoRequest defaultInstance;
+ public static GetServerInfoRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetServerInfoRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:GetServerInfoRequest)
+ }
+
+ static {
+ defaultInstance = new GetServerInfoRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetServerInfoRequest)
+ }
+
+ public interface ServerInfoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .ServerName serverName = 1;
+ boolean hasServerName();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder();
+
+ // optional uint32 webuiPort = 2;
+ boolean hasWebuiPort();
+ int getWebuiPort();
+ }
+ public static final class ServerInfo extends
+ com.google.protobuf.GeneratedMessage
+ implements ServerInfoOrBuilder {
+ // Use ServerInfo.newBuilder() to construct.
+ private ServerInfo(Builder builder) {
+ super(builder);
+ }
+ private ServerInfo(boolean noInit) {}
+
+ private static final ServerInfo defaultInstance;
+ public static ServerInfo getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ServerInfo getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .ServerName serverName = 1;
+ public static final int SERVERNAME_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_;
+ public boolean hasServerName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
+ return serverName_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
+ return serverName_;
+ }
+
+ // optional uint32 webuiPort = 2;
+ public static final int WEBUIPORT_FIELD_NUMBER = 2;
+ private int webuiPort_;
+ public boolean hasWebuiPort() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getWebuiPort() {
+ return webuiPort_;
+ }
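+
+ // Presence of the optional webuiPort is tracked in bitField0_ (bit
+ // 0x00000002), set during parsing or by the builder; hasWebuiPort() above
+ // simply tests that bit, since the primitive int field itself has no
+ // "unset" value of its own.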
+
+ private void initFields() {
+ serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ webuiPort_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasServerName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getServerName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, serverName_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt32(2, webuiPort_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, serverName_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(2, webuiPort_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) obj;
+
+ boolean result = true;
+ result = result && (hasServerName() == other.hasServerName());
+ if (hasServerName()) {
+ result = result && getServerName()
+ .equals(other.getServerName());
+ }
+ result = result && (hasWebuiPort() == other.hasWebuiPort());
+ if (hasWebuiPort()) {
+ result = result && (getWebuiPort()
+ == other.getWebuiPort());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasServerName()) {
+ hash = (37 * hash) + SERVERNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getServerName().hashCode();
+ }
+ if (hasWebuiPort()) {
+ hash = (37 * hash) + WEBUIPORT_FIELD_NUMBER;
+ hash = (53 * hash) + getWebuiPort();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ServerInfo_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getServerNameFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (serverNameBuilder_ == null) {
+ serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ } else {
+ serverNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ webuiPort_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (serverNameBuilder_ == null) {
+ result.serverName_ = serverName_;
+ } else {
+ result.serverName_ = serverNameBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.webuiPort_ = webuiPort_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) return this;
+ if (other.hasServerName()) {
+ mergeServerName(other.getServerName());
+ }
+ if (other.hasWebuiPort()) {
+ setWebuiPort(other.getWebuiPort());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasServerName()) {
+
+ return false;
+ }
+ if (!getServerName().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
+ if (hasServerName()) {
+ subBuilder.mergeFrom(getServerName());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setServerName(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ webuiPort_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .ServerName serverName = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_;
+ public boolean hasServerName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
+ if (serverNameBuilder_ == null) {
+ return serverName_;
+ } else {
+ return serverNameBuilder_.getMessage();
+ }
+ }
+ public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+ if (serverNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ serverName_ = value;
+ onChanged();
+ } else {
+ serverNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setServerName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
+ if (serverNameBuilder_ == null) {
+ serverName_ = builderForValue.build();
+ onChanged();
+ } else {
+ serverNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+ if (serverNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
+ serverName_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial();
+ } else {
+ serverName_ = value;
+ }
+ onChanged();
+ } else {
+ serverNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearServerName() {
+ if (serverNameBuilder_ == null) {
+ serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+ onChanged();
+ } else {
+ serverNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getServerNameFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
+ if (serverNameBuilder_ != null) {
+ return serverNameBuilder_.getMessageOrBuilder();
+ } else {
+ return serverName_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
+ getServerNameFieldBuilder() {
+ if (serverNameBuilder_ == null) {
+ serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
+ serverName_,
+ getParentForChildren(),
+ isClean());
+ serverName_ = null;
+ }
+ return serverNameBuilder_;
+ }
+
+ // optional uint32 webuiPort = 2;
+ private int webuiPort_ ;
+ public boolean hasWebuiPort() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getWebuiPort() {
+ return webuiPort_;
+ }
+ public Builder setWebuiPort(int value) {
+ bitField0_ |= 0x00000002;
+ webuiPort_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearWebuiPort() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ webuiPort_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ServerInfo)
+ }
+
+ static {
+ defaultInstance = new ServerInfo(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ServerInfo)
+ }
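+
+ // Usage sketch (illustrative; not part of the protoc output): constructing a
+ // ServerInfo through the builder above. The setHostName/setPort setters on
+ // HBaseProtos.ServerName are assumed from the ServerName message definition.
+ //
+ //   HBaseProtos.ServerName name = HBaseProtos.ServerName.newBuilder()
+ //       .setHostName("rs1.example.com")
+ //       .setPort(60020)
+ //       .build();
+ //   ServerInfo info = ServerInfo.newBuilder()
+ //       .setServerName(name)   // required; build() throws if unset
+ //       .setWebuiPort(60030)   // optional uint32
+ //       .build();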
+
+ public interface GetServerInfoResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .ServerInfo serverInfo = 1;
+ boolean hasServerInfo();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo();
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder();
+ }
+ public static final class GetServerInfoResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetServerInfoResponseOrBuilder {
+ // Use GetServerInfoResponse.newBuilder() to construct.
+ private GetServerInfoResponse(Builder builder) {
+ super(builder);
+ }
+ private GetServerInfoResponse(boolean noInit) {}
+
+ private static final GetServerInfoResponse defaultInstance;
+ public static GetServerInfoResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetServerInfoResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required .ServerInfo serverInfo = 1;
+ public static final int SERVERINFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_;
+ public boolean hasServerInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() {
+ return serverInfo_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() {
+ return serverInfo_;
+ }
+
+ private void initFields() {
+ serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasServerInfo()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getServerInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, serverInfo_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, serverInfo_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj;
+
+ boolean result = true;
+ result = result && (hasServerInfo() == other.hasServerInfo());
+ if (hasServerInfo()) {
+ result = result && getServerInfo()
+ .equals(other.getServerInfo());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasServerInfo()) {
+ hash = (37 * hash) + SERVERINFO_FIELD_NUMBER;
+ hash = (53 * hash) + getServerInfo().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getServerInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (serverInfoBuilder_ == null) {
+ serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
+ } else {
+ serverInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (serverInfoBuilder_ == null) {
+ result.serverInfo_ = serverInfo_;
+ } else {
+ result.serverInfo_ = serverInfoBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this;
+ if (other.hasServerInfo()) {
+ mergeServerInfo(other.getServerInfo());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasServerInfo()) {
+
+ return false;
+ }
+ if (!getServerInfo().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder();
+ if (hasServerInfo()) {
+ subBuilder.mergeFrom(getServerInfo());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setServerInfo(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .ServerInfo serverInfo = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_;
+ public boolean hasServerInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo() {
+ if (serverInfoBuilder_ == null) {
+ return serverInfo_;
+ } else {
+ return serverInfoBuilder_.getMessage();
+ }
+ }
+ public Builder setServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) {
+ if (serverInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ serverInfo_ = value;
+ onChanged();
+ } else {
+ serverInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder setServerInfo(
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder builderForValue) {
+ if (serverInfoBuilder_ == null) {
+ serverInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ serverInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder mergeServerInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo value) {
+ if (serverInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ serverInfo_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) {
+ serverInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.newBuilder(serverInfo_).mergeFrom(value).buildPartial();
+ } else {
+ serverInfo_ = value;
+ }
+ onChanged();
+ } else {
+ serverInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ public Builder clearServerInfo() {
+ if (serverInfoBuilder_ == null) {
+ serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
+ onChanged();
+ } else {
+ serverInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder getServerInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getServerInfoFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() {
+ if (serverInfoBuilder_ != null) {
+ return serverInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return serverInfo_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder>
+ getServerInfoFieldBuilder() {
+ if (serverInfoBuilder_ == null) {
+ serverInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder>(
+ serverInfo_,
+ getParentForChildren(),
+ isClean());
+ serverInfo_ = null;
+ }
+ return serverInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetServerInfoResponse)
+ }
+
+ static {
+ defaultInstance = new GetServerInfoResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetServerInfoResponse)
+ }
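+
+ // Usage sketch (illustrative; not part of the protoc output): round-tripping
+ // a GetServerInfoResponse through its wire form with the parseFrom overloads
+ // above; toByteArray() comes from com.google.protobuf.Message.
+ //
+ //   GetServerInfoResponse resp = GetServerInfoResponse.newBuilder()
+ //       .setServerInfo(info)
+ //       .build();
+ //   byte[] wire = resp.toByteArray();
+ //   GetServerInfoResponse copy = GetServerInfoResponse.parseFrom(wire);
+ //   assert copy.getServerInfo().equals(resp.getServerInfo());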
+
+ public static abstract class AdminService
+ implements com.google.protobuf.Service {
+ protected AdminService() {}
+
+ public interface Interface {
+ public abstract void getRegionInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done);
+
+ public abstract void getStoreFile(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done);
+
+ public abstract void getOnlineRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done);
+
+ public abstract void openRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done);
+
+ public abstract void closeRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done);
+
+ public abstract void flushRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done);
+
+ public abstract void splitRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done);
+
+ public abstract void compactRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done);
+
+ public abstract void replicateWALEntry(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done);
+
+ public abstract void rollWALWriter(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done);
+
+ public abstract void getServerInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done);
+
+ public abstract void stopServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new AdminService() {
+ @java.lang.Override
+ public void getRegionInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) {
+ impl.getRegionInfo(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getStoreFile(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done) {
+ impl.getStoreFile(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getOnlineRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done) {
+ impl.getOnlineRegion(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void openRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done) {
+ impl.openRegion(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void closeRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done) {
+ impl.closeRegion(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void flushRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done) {
+ impl.flushRegion(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void splitRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done) {
+ impl.splitRegion(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void compactRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done) {
+ impl.compactRegion(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void replicateWALEntry(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) {
+ impl.replicateWALEntry(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void rollWALWriter(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done) {
+ impl.rollWALWriter(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getServerInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done) {
+ impl.getServerInfo(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void stopServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done) {
+ impl.stopServer(controller, request, done);
+ }
+
+ };
+ }
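+
+ // Usage sketch (illustrative; not part of the protoc output): adapting a
+ // server-side Interface implementation to a generic Service that an RPC
+ // server dispatches to via callMethod(). MyAdminHandler is hypothetical.
+ //
+ //   AdminService.Interface impl = new MyAdminHandler();
+ //   com.google.protobuf.Service service =
+ //       AdminService.newReflectiveService(impl);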
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request);
+ case 1:
+ return impl.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request);
+ case 2:
+ return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request);
+ case 3:
+ return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request);
+ case 4:
+ return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request);
+ case 5:
+ return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request);
+ case 6:
+ return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request);
+ case 7:
+ return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request);
+ case 8:
+ return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request);
+ case 9:
+ return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request);
+ case 10:
+ return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request);
+ case 11:
+ return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ public abstract void getRegionInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done);
+
+ public abstract void getStoreFile(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done);
+
+ public abstract void getOnlineRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done);
+
+ public abstract void openRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done);
+
+ public abstract void closeRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done);
+
+ public abstract void flushRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done);
+
+ public abstract void splitRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done);
+
+ public abstract void compactRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done);
+
+ public abstract void replicateWALEntry(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done);
+
+ public abstract void rollWALWriter(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done);
+
+ public abstract void getServerInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done);
+
+ public abstract void stopServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse>specializeCallback(
+ done));
+ return;
+ case 1:
+ this.getStoreFile(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse>specializeCallback(
+ done));
+ return;
+ case 2:
+ this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse>specializeCallback(
+ done));
+ return;
+ case 3:
+ this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse>specializeCallback(
+ done));
+ return;
+ case 4:
+ this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse>specializeCallback(
+ done));
+ return;
+ case 5:
+ this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse>specializeCallback(
+ done));
+ return;
+ case 6:
+ this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse>specializeCallback(
+ done));
+ return;
+ case 7:
+ this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse>specializeCallback(
+ done));
+ return;
+ case 8:
+ this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse>specializeCallback(
+ done));
+ return;
+ case 9:
+ this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse>specializeCallback(
+ done));
+ return;
+ case 10:
+ this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse>specializeCallback(
+ done));
+ return;
+ case 11:
+ this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse>specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void getRegionInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()));
+ }
+
+ public void getStoreFile(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()));
+ }
+
+ public void getOnlineRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()));
+ }
+
+ public void openRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()));
+ }
+
+ public void closeRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(4),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()));
+ }
+
+ public void flushRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(5),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()));
+ }
+
+ public void splitRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(6),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()));
+ }
+
+ public void compactRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(7),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()));
+ }
+
+ public void replicateWALEntry(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(8),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()));
+ }
+
+ public void rollWALWriter(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()));
+ }
+
+ public void getServerInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(10),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()));
+ }
+
+ public void stopServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(11),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()));
+ }
+ }
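+
+ // Usage sketch (illustrative; not part of the protoc output): an async call
+ // through Stub, assuming some com.google.protobuf.RpcChannel `channel` from
+ // the RPC layer; the callback receives the typed response.
+ //
+ //   AdminService admin = AdminService.newStub(channel);
+ //   admin.getServerInfo(null, GetServerInfoRequest.getDefaultInstance(),
+ //       new com.google.protobuf.RpcCallback<GetServerInfoResponse>() {
+ //         public void run(GetServerInfoResponse resp) {
+ //           // handle resp; on failure the channel reports via the controller
+ //         }
+ //       });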
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request)
+ throws com.google.protobuf.ServiceException;
+ }
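+
+ // Usage sketch (illustrative; not part of the protoc output): a synchronous
+ // call through the blocking stub, assuming a
+ // com.google.protobuf.BlockingRpcChannel `channel` from the RPC layer.
+ //
+ //   AdminService.BlockingInterface admin =
+ //       AdminService.newBlockingStub(channel);
+ //   GetServerInfoResponse resp =
+ //       admin.getServerInfo(null, GetServerInfoRequest.getDefaultInstance());
+ //   int webuiPort = resp.getServerInfo().getWebuiPort();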
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getStoreFile(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(4),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(5),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(6),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(7),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(8),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(10),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(11),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance());
+ }
+
+ }
+ }
+
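+ // The Descriptor / FieldAccessorTable pairs below are filled in exactly once
+ // by the static initializer at the bottom of this file; they back the
+ // reflection API (getField/setField/hasField) for each message type above.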
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetRegionInfoRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetRegionInfoRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetRegionInfoResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetRegionInfoResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetStoreFileRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetStoreFileRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetStoreFileResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetStoreFileResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetOnlineRegionRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetOnlineRegionRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetOnlineRegionResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetOnlineRegionResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_OpenRegionRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_OpenRegionRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_OpenRegionRequest_RegionOpenInfo_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_OpenRegionResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_OpenRegionResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_CloseRegionRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_CloseRegionRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_CloseRegionResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_CloseRegionResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_FlushRegionRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_FlushRegionRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_FlushRegionResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_FlushRegionResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_SplitRegionRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_SplitRegionRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_SplitRegionResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_SplitRegionResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_CompactRegionRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_CompactRegionRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_CompactRegionResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_CompactRegionResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_UUID_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_UUID_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_WALEntry_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_WALEntry_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_WALEntry_WALKey_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_WALEntry_WALKey_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_WALEntry_WALEdit_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_WALEntry_WALEdit_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_WALEntry_WALEdit_FamilyScope_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_ReplicateWALEntryRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_ReplicateWALEntryRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_ReplicateWALEntryResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_ReplicateWALEntryResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_RollWALWriterRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_RollWALWriterRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_RollWALWriterResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_RollWALWriterResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_StopServerRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_StopServerRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_StopServerResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_StopServerResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetServerInfoRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetServerInfoRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_ServerInfo_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_ServerInfo_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_GetServerInfoResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_GetServerInfoResponse_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
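+ // descriptorData is the serialized FileDescriptorProto for Admin.proto,
+ // emitted by protoc as escaped string chunks; it is parsed once here, when
+ // this class is loaded.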
+ java.lang.String[] descriptorData = {
+ "\n\013Admin.proto\032\013hbase.proto\"Q\n\024GetRegionI" +
+ "nfoRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" +
+ "fier\022\027\n\017compactionState\030\002 \001(\010\"\301\001\n\025GetReg" +
+ "ionInfoResponse\022\037\n\nregionInfo\030\001 \002(\0132\013.Re" +
+ "gionInfo\022?\n\017compactionState\030\002 \001(\0162&.GetR" +
+ "egionInfoResponse.CompactionState\"F\n\017Com" +
+ "pactionState\022\010\n\004NONE\020\000\022\t\n\005MINOR\020\001\022\t\n\005MAJ" +
+ "OR\020\002\022\023\n\017MAJOR_AND_MINOR\020\003\"G\n\023GetStoreFil" +
+ "eRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" +
+ "er\022\016\n\006family\030\002 \003(\014\")\n\024GetStoreFileRespon",
+ "se\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetOnlineRegion" +
+ "Request\":\n\027GetOnlineRegionResponse\022\037\n\nre" +
+ "gionInfo\030\001 \003(\0132\013.RegionInfo\"\225\001\n\021OpenRegi" +
+ "onRequest\0223\n\010openInfo\030\001 \003(\0132!.OpenRegion" +
+ "Request.RegionOpenInfo\032K\n\016RegionOpenInfo" +
+ "\022\033\n\006region\030\001 \002(\0132\013.RegionInfo\022\034\n\024version" +
+ "OfOfflineNode\030\002 \001(\r\"\234\001\n\022OpenRegionRespon" +
+ "se\022<\n\014openingState\030\001 \003(\0162&.OpenRegionRes" +
+ "ponse.RegionOpeningState\"H\n\022RegionOpenin" +
+ "gState\022\n\n\006OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022",
+ "\n\016FAILED_OPENING\020\002\"\232\001\n\022CloseRegionReques" +
+ "t\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024v" +
+ "ersionOfClosingNode\030\002 \001(\r\022\034\n\016transitionI" +
+ "nZK\030\003 \001(\010:\004true\022&\n\021destinationServer\030\004 \001" +
+ "(\0132\013.ServerName\"%\n\023CloseRegionResponse\022\016" +
+ "\n\006closed\030\001 \002(\010\"M\n\022FlushRegionRequest\022 \n\006" +
+ "region\030\001 \002(\0132\020.RegionSpecifier\022\025\n\rifOlde" +
+ "rThanTs\030\002 \001(\004\"=\n\023FlushRegionResponse\022\025\n\r" +
+ "lastFlushTime\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022" +
+ "SplitRegionRequest\022 \n\006region\030\001 \002(\0132\020.Reg",
+ "ionSpecifier\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023Spli" +
+ "tRegionResponse\"W\n\024CompactRegionRequest\022" +
+ " \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\r\n\005maj" +
+ "or\030\002 \001(\010\022\016\n\006family\030\003 \001(\014\"\027\n\025CompactRegio" +
+ "nResponse\"1\n\004UUID\022\024\n\014leastSigBits\030\001 \002(\004\022" +
+ "\023\n\013mostSigBits\030\002 \002(\004\"\270\003\n\010WALEntry\022\035\n\003key" +
+ "\030\001 \002(\0132\020.WALEntry.WALKey\022\037\n\004edit\030\002 \002(\0132\021" +
+ ".WALEntry.WALEdit\032~\n\006WALKey\022\031\n\021encodedRe" +
+ "gionName\030\001 \002(\014\022\021\n\ttableName\030\002 \002(\014\022\031\n\021log" +
+ "SequenceNumber\030\003 \002(\004\022\021\n\twriteTime\030\004 \002(\004\022",
+ "\030\n\tclusterId\030\005 \001(\0132\005.UUID\032\353\001\n\007WALEdit\022\025\n" +
+ "\rkeyValueBytes\030\001 \003(\014\0222\n\013familyScope\030\002 \003(" +
+ "\0132\035.WALEntry.WALEdit.FamilyScope\032M\n\013Fami" +
+ "lyScope\022\016\n\006family\030\001 \002(\014\022.\n\tscopeType\030\002 \002" +
+ "(\0162\033.WALEntry.WALEdit.ScopeType\"F\n\tScope" +
+ "Type\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030REP" +
+ "LICATION_SCOPE_GLOBAL\020\001\"4\n\030ReplicateWALE" +
+ "ntryRequest\022\030\n\005entry\030\001 \003(\0132\t.WALEntry\"\033\n" +
+ "\031ReplicateWALEntryResponse\"\026\n\024RollWALWri" +
+ "terRequest\".\n\025RollWALWriterResponse\022\025\n\rr",
+ "egionToFlush\030\001 \003(\014\"#\n\021StopServerRequest\022" +
+ "\016\n\006reason\030\001 \002(\t\"\024\n\022StopServerResponse\"\026\n" +
+ "\024GetServerInfoRequest\"@\n\nServerInfo\022\037\n\ns" +
+ "erverName\030\001 \002(\0132\013.ServerName\022\021\n\twebuiPor" +
+ "t\030\002 \001(\r\"8\n\025GetServerInfoResponse\022\037\n\nserv" +
+ "erInfo\030\001 \002(\0132\013.ServerInfo2\371\005\n\014AdminServi" +
+ "ce\022>\n\rgetRegionInfo\022\025.GetRegionInfoReque" +
+ "st\032\026.GetRegionInfoResponse\022;\n\014getStoreFi" +
+ "le\022\024.GetStoreFileRequest\032\025.GetStoreFileR" +
+ "esponse\022D\n\017getOnlineRegion\022\027.GetOnlineRe",
+ "gionRequest\032\030.GetOnlineRegionResponse\0225\n" +
+ "\nopenRegion\022\022.OpenRegionRequest\032\023.OpenRe" +
+ "gionResponse\0228\n\013closeRegion\022\023.CloseRegio" +
+ "nRequest\032\024.CloseRegionResponse\0228\n\013flushR" +
+ "egion\022\023.FlushRegionRequest\032\024.FlushRegion" +
+ "Response\0228\n\013splitRegion\022\023.SplitRegionReq" +
+ "uest\032\024.SplitRegionResponse\022>\n\rcompactReg" +
+ "ion\022\025.CompactRegionRequest\032\026.CompactRegi" +
+ "onResponse\022J\n\021replicateWALEntry\022\031.Replic" +
+ "ateWALEntryRequest\032\032.ReplicateWALEntryRe",
+ "sponse\022>\n\rrollWALWriter\022\025.RollWALWriterR" +
+ "equest\032\026.RollWALWriterResponse\022>\n\rgetSer" +
+ "verInfo\022\025.GetServerInfoRequest\032\026.GetServ" +
+ "erInfoResponse\0225\n\nstopServer\022\022.StopServe" +
+ "rRequest\032\023.StopServerResponseBA\n*org.apa" +
+ "che.hadoop.hbase.protobuf.generatedB\013Adm" +
+ "inProtosH\001\210\001\001\240\001\001"
+ };
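+ // The assigner runs while the FileDescriptor is built: it binds each
+ // message's Descriptor (indexed in declaration order in Admin.proto) to its
+ // FieldAccessorTable. hbase.proto (HBaseProtos) is passed as the lone
+ // dependency so cross-file types such as RegionSpecifier, RegionInfo and
+ // ServerName resolve.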
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_GetRegionInfoRequest_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_GetRegionInfoRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetRegionInfoRequest_descriptor,
+ new java.lang.String[] { "Region", "CompactionState", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class);
+ internal_static_GetRegionInfoResponse_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_GetRegionInfoResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetRegionInfoResponse_descriptor,
+ new java.lang.String[] { "RegionInfo", "CompactionState", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class);
+ internal_static_GetStoreFileRequest_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_GetStoreFileRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetStoreFileRequest_descriptor,
+ new java.lang.String[] { "Region", "Family", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
+ internal_static_GetStoreFileResponse_descriptor =
+ getDescriptor().getMessageTypes().get(3);
+ internal_static_GetStoreFileResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetStoreFileResponse_descriptor,
+ new java.lang.String[] { "StoreFile", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class);
+ internal_static_GetOnlineRegionRequest_descriptor =
+ getDescriptor().getMessageTypes().get(4);
+ internal_static_GetOnlineRegionRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetOnlineRegionRequest_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
+ internal_static_GetOnlineRegionResponse_descriptor =
+ getDescriptor().getMessageTypes().get(5);
+ internal_static_GetOnlineRegionResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetOnlineRegionResponse_descriptor,
+ new java.lang.String[] { "RegionInfo", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class);
+ internal_static_OpenRegionRequest_descriptor =
+ getDescriptor().getMessageTypes().get(6);
+ internal_static_OpenRegionRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_OpenRegionRequest_descriptor,
+ new java.lang.String[] { "OpenInfo", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class);
+ internal_static_OpenRegionRequest_RegionOpenInfo_descriptor =
+ internal_static_OpenRegionRequest_descriptor.getNestedTypes().get(0);
+ internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_OpenRegionRequest_RegionOpenInfo_descriptor,
+ new java.lang.String[] { "Region", "VersionOfOfflineNode", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class);
+ internal_static_OpenRegionResponse_descriptor =
+ getDescriptor().getMessageTypes().get(7);
+ internal_static_OpenRegionResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_OpenRegionResponse_descriptor,
+ new java.lang.String[] { "OpeningState", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class);
+ internal_static_CloseRegionRequest_descriptor =
+ getDescriptor().getMessageTypes().get(8);
+ internal_static_CloseRegionRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_CloseRegionRequest_descriptor,
+ new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class);
+ internal_static_CloseRegionResponse_descriptor =
+ getDescriptor().getMessageTypes().get(9);
+ internal_static_CloseRegionResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_CloseRegionResponse_descriptor,
+ new java.lang.String[] { "Closed", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class);
+ internal_static_FlushRegionRequest_descriptor =
+ getDescriptor().getMessageTypes().get(10);
+ internal_static_FlushRegionRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_FlushRegionRequest_descriptor,
+ new java.lang.String[] { "Region", "IfOlderThanTs", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class);
+ internal_static_FlushRegionResponse_descriptor =
+ getDescriptor().getMessageTypes().get(11);
+ internal_static_FlushRegionResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_FlushRegionResponse_descriptor,
+ new java.lang.String[] { "LastFlushTime", "Flushed", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class);
+ internal_static_SplitRegionRequest_descriptor =
+ getDescriptor().getMessageTypes().get(12);
+ internal_static_SplitRegionRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_SplitRegionRequest_descriptor,
+ new java.lang.String[] { "Region", "SplitPoint", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class);
+ internal_static_SplitRegionResponse_descriptor =
+ getDescriptor().getMessageTypes().get(13);
+ internal_static_SplitRegionResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_SplitRegionResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class);
+ internal_static_CompactRegionRequest_descriptor =
+ getDescriptor().getMessageTypes().get(14);
+ internal_static_CompactRegionRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_CompactRegionRequest_descriptor,
+ new java.lang.String[] { "Region", "Major", "Family", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class);
+ internal_static_CompactRegionResponse_descriptor =
+ getDescriptor().getMessageTypes().get(15);
+ internal_static_CompactRegionResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_CompactRegionResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class);
+ internal_static_UUID_descriptor =
+ getDescriptor().getMessageTypes().get(16);
+ internal_static_UUID_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_UUID_descriptor,
+ new java.lang.String[] { "LeastSigBits", "MostSigBits", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class);
+ internal_static_WALEntry_descriptor =
+ getDescriptor().getMessageTypes().get(17);
+ internal_static_WALEntry_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_WALEntry_descriptor,
+ new java.lang.String[] { "Key", "Edit", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class);
+ internal_static_WALEntry_WALKey_descriptor =
+ internal_static_WALEntry_descriptor.getNestedTypes().get(0);
+ internal_static_WALEntry_WALKey_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_WALEntry_WALKey_descriptor,
+ new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class);
+ internal_static_WALEntry_WALEdit_descriptor =
+ internal_static_WALEntry_descriptor.getNestedTypes().get(1);
+ internal_static_WALEntry_WALEdit_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_WALEntry_WALEdit_descriptor,
+ new java.lang.String[] { "KeyValueBytes", "FamilyScope", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class);
+ internal_static_WALEntry_WALEdit_FamilyScope_descriptor =
+ internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0);
+ internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_WALEntry_WALEdit_FamilyScope_descriptor,
+ new java.lang.String[] { "Family", "ScopeType", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class);
+ internal_static_ReplicateWALEntryRequest_descriptor =
+ getDescriptor().getMessageTypes().get(18);
+ internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_ReplicateWALEntryRequest_descriptor,
+ new java.lang.String[] { "Entry", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class);
+ internal_static_ReplicateWALEntryResponse_descriptor =
+ getDescriptor().getMessageTypes().get(19);
+ internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_ReplicateWALEntryResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class);
+ internal_static_RollWALWriterRequest_descriptor =
+ getDescriptor().getMessageTypes().get(20);
+ internal_static_RollWALWriterRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_RollWALWriterRequest_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class);
+ internal_static_RollWALWriterResponse_descriptor =
+ getDescriptor().getMessageTypes().get(21);
+ internal_static_RollWALWriterResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_RollWALWriterResponse_descriptor,
+ new java.lang.String[] { "RegionToFlush", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class);
+ internal_static_StopServerRequest_descriptor =
+ getDescriptor().getMessageTypes().get(22);
+ internal_static_StopServerRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_StopServerRequest_descriptor,
+ new java.lang.String[] { "Reason", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class);
+ internal_static_StopServerResponse_descriptor =
+ getDescriptor().getMessageTypes().get(23);
+ internal_static_StopServerResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_StopServerResponse_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class);
+ internal_static_GetServerInfoRequest_descriptor =
+ getDescriptor().getMessageTypes().get(24);
+ internal_static_GetServerInfoRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetServerInfoRequest_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class);
+ internal_static_ServerInfo_descriptor =
+ getDescriptor().getMessageTypes().get(25);
+ internal_static_ServerInfo_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_ServerInfo_descriptor,
+ new java.lang.String[] { "ServerName", "WebuiPort", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class);
+ internal_static_GetServerInfoResponse_descriptor =
+ getDescriptor().getMessageTypes().get(26);
+ internal_static_GetServerInfoResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_GetServerInfoResponse_descriptor,
+ new java.lang.String[] { "ServerInfo", },
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
new file mode 100644
index 0000000..ad1dbeb
--- /dev/null
+++ hbase-rpc/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
@@ -0,0 +1,1829 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Aggregate.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AggregateProtos {
+ private AggregateProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface AggregateArgumentOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string interpreterClassName = 1;
+ boolean hasInterpreterClassName();
+ String getInterpreterClassName();
+
+ // required .Scan scan = 2;
+ boolean hasScan();
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+ // optional bytes interpreterSpecificBytes = 3;
+ boolean hasInterpreterSpecificBytes();
+ com.google.protobuf.ByteString getInterpreterSpecificBytes();
+ }
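+ // Editor's sketch (not protoc output): an AggregateArgument is assembled via
+ // its Builder; both required fields must be set or build() throws, e.g.
+ //
+ //   AggregateProtos.AggregateArgument arg =
+ //       AggregateProtos.AggregateArgument.newBuilder()
+ //           .setInterpreterClassName(
+ //               "org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter")
+ //           .setScan(scan)  // some ClientProtos.Scan already in scope
+ //           .build();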
+ public static final class AggregateArgument extends
+ com.google.protobuf.GeneratedMessage
+ implements AggregateArgumentOrBuilder {
+ // Use AggregateArgument.newBuilder() to construct.
+ private AggregateArgument(Builder builder) {
+ super(builder);
+ }
+ private AggregateArgument(boolean noInit) {}
+
+ private static final AggregateArgument defaultInstance;
+ public static AggregateArgument getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AggregateArgument getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable;
+ }
+
+ private int bitField0_;
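+ // bitField0_ packs the has-bits: bit 0 = interpreterClassName,
+ // bit 1 = scan, bit 2 = interpreterSpecificBytes.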
+ // required string interpreterClassName = 1;
+ public static final int INTERPRETERCLASSNAME_FIELD_NUMBER = 1;
+ private java.lang.Object interpreterClassName_;
+ public boolean hasInterpreterClassName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getInterpreterClassName() {
+ java.lang.Object ref = interpreterClassName_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ interpreterClassName_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getInterpreterClassNameBytes() {
+ java.lang.Object ref = interpreterClassName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ interpreterClassName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required .Scan scan = 2;
+ public static final int SCAN_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ return scan_;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ return scan_;
+ }
+
+ // optional bytes interpreterSpecificBytes = 3;
+ public static final int INTERPRETERSPECIFICBYTES_FIELD_NUMBER = 3;
+ private com.google.protobuf.ByteString interpreterSpecificBytes_;
+ public boolean hasInterpreterSpecificBytes() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
+ return interpreterSpecificBytes_;
+ }
+
+ private void initFields() {
+ interpreterClassName_ = "";
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasInterpreterClassName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasScan()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getInterpreterClassNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, scan_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, interpreterSpecificBytes_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getInterpreterClassNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, scan_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, interpreterSpecificBytes_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) obj;
+
+ boolean result = true;
+ result = result && (hasInterpreterClassName() == other.hasInterpreterClassName());
+ if (hasInterpreterClassName()) {
+ result = result && getInterpreterClassName()
+ .equals(other.getInterpreterClassName());
+ }
+ result = result && (hasScan() == other.hasScan());
+ if (hasScan()) {
+ result = result && getScan()
+ .equals(other.getScan());
+ }
+ result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes());
+ if (hasInterpreterSpecificBytes()) {
+ result = result && getInterpreterSpecificBytes()
+ .equals(other.getInterpreterSpecificBytes());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasInterpreterClassName()) {
+ hash = (37 * hash) + INTERPRETERCLASSNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getInterpreterClassName().hashCode();
+ }
+ if (hasScan()) {
+ hash = (37 * hash) + SCAN_FIELD_NUMBER;
+ hash = (53 * hash) + getScan().hashCode();
+ }
+ if (hasInterpreterSpecificBytes()) {
+ hash = (37 * hash) + INTERPRETERSPECIFICBYTES_FIELD_NUMBER;
+ hash = (53 * hash) + getInterpreterSpecificBytes().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgumentOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getScanFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ interpreterClassName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument build() {
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.interpreterClassName_ = interpreterClassName_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (scanBuilder_ == null) {
+ result.scan_ = scan_;
+ } else {
+ result.scan_ = scanBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.interpreterSpecificBytes_ = interpreterSpecificBytes_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance()) return this;
+ if (other.hasInterpreterClassName()) {
+ setInterpreterClassName(other.getInterpreterClassName());
+ }
+ if (other.hasScan()) {
+ mergeScan(other.getScan());
+ }
+ if (other.hasInterpreterSpecificBytes()) {
+ setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasInterpreterClassName()) {
+
+ return false;
+ }
+ if (!hasScan()) {
+
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
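+ // Wire-format note for the parsing loop below: each case matches a
+ // precomputed tag (fieldNumber << 3 | wireType), so 10, 18 and 26 are
+ // fields 1-3 with wire type 2 (length-delimited).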
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ interpreterClassName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder();
+ if (hasScan()) {
+ subBuilder.mergeFrom(getScan());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setScan(subBuilder.buildPartial());
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ interpreterSpecificBytes_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string interpreterClassName = 1;
+ private java.lang.Object interpreterClassName_ = "";
+ public boolean hasInterpreterClassName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getInterpreterClassName() {
+ java.lang.Object ref = interpreterClassName_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ interpreterClassName_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setInterpreterClassName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ interpreterClassName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearInterpreterClassName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
+ onChanged();
+ return this;
+ }
+ void setInterpreterClassName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ interpreterClassName_ = value;
+ onChanged();
+ }
+
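+ // The nested-message field below is held either as a plain message (scan_)
+ // or behind a lazily created SingleFieldBuilder (scanBuilder_), which keeps
+ // parent and child builders in sync once getScanBuilder() hands out a
+ // mutable view.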
+ // required .Scan scan = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ if (scanBuilder_ == null) {
+ return scan_;
+ } else {
+ return scanBuilder_.getMessage();
+ }
+ }
+ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scan_ = value;
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder setScan(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
+ if (scanBuilder_ == null) {
+ scan_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
+ scan_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
+ } else {
+ scan_ = value;
+ }
+ onChanged();
+ } else {
+ scanBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ public Builder clearScan() {
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ onChanged();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getScanFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ if (scanBuilder_ != null) {
+ return scanBuilder_.getMessageOrBuilder();
+ } else {
+ return scan_;
+ }
+ }
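+ // Lazily creates the SingleFieldBuilder for the nested Scan; once built,
+ // the builder owns the message and scan_ is nulled so all access funnels
+ // through it.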
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
+ getScanFieldBuilder() {
+ if (scanBuilder_ == null) {
+ scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
+ scan_,
+ getParentForChildren(),
+ isClean());
+ scan_ = null;
+ }
+ return scanBuilder_;
+ }
+
+ // optional bytes interpreterSpecificBytes = 3;
+ private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasInterpreterSpecificBytes() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
+ return interpreterSpecificBytes_;
+ }
+ public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ interpreterSpecificBytes_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearInterpreterSpecificBytes() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:AggregateArgument)
+ }
+
+ static {
+ defaultInstance = new AggregateArgument(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:AggregateArgument)
+ }
+
+ public interface AggregateResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated bytes firstPart = 1;
+ java.util.List<com.google.protobuf.ByteString> getFirstPartList();
+ int getFirstPartCount();
+ com.google.protobuf.ByteString getFirstPart(int index);
+
+ // optional bytes secondPart = 2;
+ boolean hasSecondPart();
+ com.google.protobuf.ByteString getSecondPart();
+ }
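+ // AggregateResponse is the single reply type for every AggregateService
+ // method: firstPart carries the serialized partial result(s), typically one
+ // element per region scanned, and the optional secondPart carries any
+ // auxiliary payload the interpreter needs (for example, a count alongside a
+ // sum when computing an average).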
+ public static final class AggregateResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements AggregateResponseOrBuilder {
+ // Use AggregateResponse.newBuilder() to construct.
+ private AggregateResponse(Builder builder) {
+ super(builder);
+ }
+ private AggregateResponse(boolean noInit) {}
+
+ private static final AggregateResponse defaultInstance;
+ public static AggregateResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AggregateResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // repeated bytes firstPart = 1;
+ public static final int FIRSTPART_FIELD_NUMBER = 1;
+ private java.util.List<com.google.protobuf.ByteString> firstPart_;
+ public java.util.List<com.google.protobuf.ByteString>
+ getFirstPartList() {
+ return firstPart_;
+ }
+ public int getFirstPartCount() {
+ return firstPart_.size();
+ }
+ public com.google.protobuf.ByteString getFirstPart(int index) {
+ return firstPart_.get(index);
+ }
+
+ // optional bytes secondPart = 2;
+ public static final int SECONDPART_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString secondPart_;
+ public boolean hasSecondPart() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getSecondPart() {
+ return secondPart_;
+ }
+
+ private void initFields() {
+ firstPart_ = java.util.Collections.emptyList();
+ secondPart_ = com.google.protobuf.ByteString.EMPTY;
+ }
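+ // AggregateResponse declares no required fields, so initialization
+ // checking reduces to a memoized constant.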
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < firstPart_.size(); i++) {
+ output.writeBytes(1, firstPart_.get(i));
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(2, secondPart_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < firstPart_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(firstPart_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getFirstPartList().size();
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, secondPart_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj;
+
+ boolean result = true;
+ result = result && getFirstPartList()
+ .equals(other.getFirstPartList());
+ result = result && (hasSecondPart() == other.hasSecondPart());
+ if (hasSecondPart()) {
+ result = result && getSecondPart()
+ .equals(other.getSecondPart());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getFirstPartCount() > 0) {
+ hash = (37 * hash) + FIRSTPART_FIELD_NUMBER;
+ hash = (53 * hash) + getFirstPartList().hashCode();
+ }
+ if (hasSecondPart()) {
+ hash = (37 * hash) + SECONDPART_FIELD_NUMBER;
+ hash = (53 * hash) + getSecondPart().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ firstPart_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ secondPart_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ firstPart_ = java.util.Collections.unmodifiableList(firstPart_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.firstPart_ = firstPart_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.secondPart_ = secondPart_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this;
+ if (!other.firstPart_.isEmpty()) {
+ if (firstPart_.isEmpty()) {
+ firstPart_ = other.firstPart_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureFirstPartIsMutable();
+ firstPart_.addAll(other.firstPart_);
+ }
+ onChanged();
+ }
+ if (other.hasSecondPart()) {
+ setSecondPart(other.getSecondPart());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ ensureFirstPartIsMutable();
+ firstPart_.add(input.readBytes());
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ secondPart_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated bytes firstPart = 1;
+ private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();
+ private void ensureFirstPartIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>(firstPart_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<com.google.protobuf.ByteString>
+ getFirstPartList() {
+ return java.util.Collections.unmodifiableList(firstPart_);
+ }
+ public int getFirstPartCount() {
+ return firstPart_.size();
+ }
+ public com.google.protobuf.ByteString getFirstPart(int index) {
+ return firstPart_.get(index);
+ }
+ public Builder setFirstPart(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFirstPartIsMutable();
+ firstPart_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addFirstPart(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFirstPartIsMutable();
+ firstPart_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllFirstPart(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureFirstPartIsMutable();
+ super.addAll(values, firstPart_);
+ onChanged();
+ return this;
+ }
+ public Builder clearFirstPart() {
+ firstPart_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
+ // optional bytes secondPart = 2;
+ private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasSecondPart() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getSecondPart() {
+ return secondPart_;
+ }
+ public Builder setSecondPart(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ secondPart_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearSecondPart() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ secondPart_ = getDefaultInstance().getSecondPart();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:AggregateResponse)
+ }
+
+ static {
+ defaultInstance = new AggregateResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:AggregateResponse)
+ }
+
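+ // AggregateService exposes seven aggregate endpoints (max, min, sum, row
+ // count, average, standard deviation, median) over protobuf's generic
+ // Service machinery. A minimal client-side sketch (illustrative only, not
+ // part of the generated file; channel construction is environment-specific
+ // and LongColumnInterpreter is just one interpreter choice):
+ //
+ // AggregateService.BlockingInterface stub =
+ // AggregateService.newBlockingStub(channel);
+ // AggregateProtos.AggregateArgument arg =
+ // AggregateProtos.AggregateArgument.newBuilder()
+ // .setInterpreterClassName(LongColumnInterpreter.class.getName())
+ // .setScan(scanProto) // a ClientProtos.Scan
+ // .build();
+ // AggregateProtos.AggregateResponse resp = stub.getSum(null, arg);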
+ public static abstract class AggregateService
+ implements com.google.protobuf.Service {
+ protected AggregateService() {}
+
+ public interface Interface {
+ public abstract void getMax(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getMin(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getSum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getRowNum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getAvg(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getStd(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getMedian(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ }
+
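+ // Adapts a plain Interface implementation into a full Service by
+ // forwarding each endpoint to the corresponding impl method.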
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new AggregateService() {
+ @java.lang.Override
+ public void getMax(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getMax(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getMin(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getMin(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getSum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getSum(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getRowNum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getRowNum(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getAvg(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getAvg(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getStd(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getStd(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getMedian(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+ impl.getMedian(controller, request, done);
+ }
+
+ };
+ }
+
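+ // The blocking counterpart: calls are dispatched on the method
+ // descriptor's index, and per-method request/response prototypes are
+ // served so callers can parse messages of the right type.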
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ case 1:
+ return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ case 2:
+ return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ case 3:
+ return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ case 4:
+ return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ case 5:
+ return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ case 6:
+ return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
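+ // These abstract methods mirror Interface one-for-one, for subclasses
+ // that extend AggregateService directly instead of going through
+ // newReflectiveService().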
+ public abstract void getMax(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getMin(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getSum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getRowNum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getAvg(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getStd(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public abstract void getMedian(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
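+ // Non-blocking dispatch: the method descriptor's index selects the
+ // endpoint and the untyped Message callback is specialized to the
+ // concrete response type before delegation.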
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+ com.google.protobuf.RpcUtil.